Dataset schema:

  blob_id             stringlengths    40 - 40
  language            stringclasses    1 value
  repo_name           stringlengths    5 - 140
  path                stringlengths    5 - 183
  src_encoding        stringclasses    6 values
  length_bytes        int64            12 - 5.32M
  score               float64          2.52 - 4.94
  int_score           int64            3 - 5
  detected_licenses   listlengths      0 - 47
  license_type        stringclasses    2 values
  text                stringlengths    12 - 5.32M
  download_success    bool             1 class
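For orientation, a minimal Rust sketch of what one row of this schema could look like as a struct. The field names follow the columns above, but the concrete types (for example u64 for length_bytes, Vec<String> for detected_licenses) are assumptions; the dataset itself ships no such definition.

// Hypothetical mirror of a single dataset row; types are inferred from the
// column ranges above, not provided by the dataset.
#[derive(Debug)]
struct DatasetRow {
    blob_id: String,                // 40-character blob hash
    language: String,               // always "Rust" in this dump (1 class)
    repo_name: String,
    path: String,
    src_encoding: String,           // one of 6 encodings, e.g. "UTF-8"
    length_bytes: u64,              // 12 to ~5.32M
    score: f64,                     // 2.52 to 4.94
    int_score: u8,                  // 3 to 5
    detected_licenses: Vec<String>, // 0 to 47 entries
    license_type: String,           // "permissive" or "no_license"
    text: String,                   // full source file contents
    download_success: bool,
}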
blob_id: d545cb848a6b78a3ded37a323d261caa2100f86c
language: Rust
repo_name: tuzz/game-engine
path: /src/systems/model_loader.rs
src_encoding: UTF-8
length_bytes: 6,885
score: 2.578125
int_score: 3
detected_licenses: []
license_type: no_license
text:
use specs::prelude::*; use tobj::{load_obj_buf, load_mtl_buf}; use std::collections::HashMap; use std::io::BufReader; use crate::resources::*; use crate::components::*; pub struct ModelLoader; #[derive(SystemData)] pub struct SysData<'a> { entities: Entities<'a>, model_groups: Write<'a, ModelGroups>, models_to_load: WriteStorage<'a, ModelsToLoad>, file_contents: WriteStorage<'a, FileContent>, buffer_datas: WriteStorage<'a, BufferData>, dimensions: WriteStorage<'a, Dimensions>, names: WriteStorage<'a, Name>, ambients: WriteStorage<'a, Ambient>, diffuses: WriteStorage<'a, Diffuse>, speculars: WriteStorage<'a, Specular>, shinies: WriteStorage<'a, Shininess>, images_to_load: WriteStorage<'a, ImageToLoad>, normals: WriteStorage<'a, Normals>, materials: WriteStorage<'a, Material>, textures: WriteStorage<'a, Texture>, texcoords: WriteStorage<'a, TexCoords>, } impl<'a> System<'a> for ModelLoader { type SystemData = SysData<'a>; fn run(&mut self, mut s: SysData) { let preloaded = (&s.entities, &s.models_to_load).join() .filter(|(_, m)| m.preloaded).map(|(e, _)| e).collect::<Vec<_>>(); for entity in preloaded { let mut models_to_load = s.models_to_load.remove(entity).unwrap(); let files_and_content = files_and_content(&mut s, &mut models_to_load); let (models, materials) = models_and_materials(files_and_content); let materials_and_textures = materials.iter().map(|material| { let material_model = create_material_entity(&mut s, &material); let texture_model = create_texture_entity(&mut s, &material); (material_model, texture_model) }).collect::<Vec<_>>(); for (model, material_index, filename) in models { let material_and_texture = material_index.map(|i| materials_and_textures[i]); let geometry_model = create_buffer_entity(&mut s, &model, &model.mesh.positions, 3, "geometry").unwrap(); let normals_model = create_buffer_entity(&mut s, &model, &model.mesh.normals, 3, "normals"); let texcoords_model = create_buffer_entity(&mut s, &model, &model.mesh.texcoords, 2, "texcoords"); if let Some(model) = normals_model { s.normals.insert(geometry_model, Normals { model }).unwrap(); } if let Some(model) = texcoords_model { s.texcoords.insert(geometry_model, TexCoords { model }).unwrap(); } if let Some((model, _)) = material_and_texture { s.materials.insert(geometry_model, Material { model }).unwrap(); } if let Some((_, Some(model))) = material_and_texture { s.textures.insert(geometry_model, Texture { model }).unwrap(); } s.model_groups.add(filename, &geometry_model); } } } } #[derive(Copy, Clone, Eq, PartialEq)] enum FileType { Object, Material } type FilesAndContent = Vec<(FileType, String, String)>; fn files_and_content(s: &mut SysData, models_to_load: &mut ModelsToLoad) -> FilesAndContent { let model_filenames = &[ (FileType::Material, &models_to_load.material_filenames), (FileType::Object, &models_to_load.object_filenames), ]; model_filenames.iter().flat_map(|(file_type, filenames)| { filenames.iter().map(|(filename, file_loader)| { let entity = file_loader.unwrap(); let content = s.file_contents.remove(entity).unwrap(); s.entities.delete(entity).unwrap(); (*file_type, filename.clone(), content.0) }).collect::<Vec<_>>() }).collect() } fn models_and_materials(files_and_content: FilesAndContent) -> (Vec<(tobj::Model, Option<usize>, String)>, Vec<tobj::Material>) { let (mut models, mut materials) = (vec![], vec![]); let mut material_map = HashMap::new(); for (file_type, filename, content) in &files_and_content { match file_type { FileType::Material => { material_map.insert(filename, content); }, FileType::Object 
=> { let mut reader = BufReader::new(content.as_bytes()); let (m1, mut m2) = load_obj_buf(&mut reader, |p| { let name = p.to_str().unwrap().to_string(); let material = material_map.get(&name).unwrap(); load_mtl_buf(&mut BufReader::new(material.as_bytes())) }).unwrap(); for model in m1 { let offset = materials.len(); let index = model.mesh.material_id.map(|i| i + offset); models.push((model, index, filename.clone())); } materials.append(&mut m2); }, } } (models, materials) } fn create_material_entity(s: &mut SysData, material: &tobj::Material) -> Entity { let entity = s.entities.create(); let ambient = material.ambient.into(); let diffuse = material.diffuse.into(); let specular = material.specular.into(); let shininess = material.shininess.into(); let name = format!("material_{}", material.name); s.ambients.insert(entity, Ambient(ambient)).unwrap(); s.diffuses.insert(entity, Diffuse(diffuse)).unwrap(); s.speculars.insert(entity, Specular(specular)).unwrap(); s.shinies.insert(entity, Shininess(shininess)).unwrap(); s.names.insert(entity, Name(name)).unwrap(); entity } fn create_texture_entity(s: &mut SysData, material: &tobj::Material) -> Option<Entity> { let entity = s.entities.create(); let filenames = [ &material.ambient_texture, &material.diffuse_texture, &material.specular_texture, &material.normal_texture, &material.dissolve_texture, ]; let filename = filenames.iter().find(|f| !f.is_empty()).cloned()?; s.images_to_load.insert(entity, ImageToLoad::new(filename)).unwrap(); s.names.insert(entity, Name(filename.to_string())).unwrap(); Some(entity) } fn create_buffer_entity(s: &mut SysData, model: &tobj::Model, field: &[f32], dimensions: u32, name_prefix: &str) -> Option<Entity> { if field.len() == 0 { return None; } let mesh = &model.mesh; let entity = s.entities.create(); let d = dimensions as usize; let data = mesh.indices.iter() .flat_map(|&i| field[d * i as usize..].iter().take(d).cloned()) .collect::<Vec<_>>(); let name = format!("{}_{}", name_prefix, model.name); s.buffer_datas.insert(entity, BufferData(data)).unwrap(); s.dimensions.insert(entity, Dimensions(dimensions)).unwrap(); s.names.insert(entity, Name(name)).unwrap(); Some(entity) }
download_success: true
blob_id: b39f21f5525ae32dbc32d1fcce20daeced9dffc2
language: Rust
repo_name: froyoframework/rust-intro
path: /basic-rust-sample/src/main.rs
src_encoding: UTF-8
length_bytes: 2,177
score: 3.546875
int_score: 4
detected_licenses: [ "MIT" ]
license_type: permissive
text:
#[derive(Debug)] struct Pemain { nama: String, umur: i32, gol: i32, } impl Pemain { fn new(nama: &str) -> Pemain { Pemain { nama: nama.to_string(), umur: 27, gol: 100 } } fn get_gol(&self) -> i32 { self.gol } } fn main() { // Variabel dasar let angka = 9; let salam = "Selamat datang, Android no "; let halo = format!("{} {}", salam, angka); println!("{:?}", halo); // variabel dan fungsi let angka_saya = calc(angka); println!("{}", angka_saya); let messi = tambah_pemain("Messi", 29, 500); println!("{:?}", messi); let pemain_keren = tambah_para_pemain(); println!("{:?}", pemain_keren); // contoh ownership dan borrowing (reference) // pemain_bola adalah reference ke pemain_keren, sehingga pemain_keren tetap bisa diprint let pemain_bola = &pemain_keren; println!("pemain pertama adalah: {}", pemain_keren[0].nama); // b adalah mutable reference ke a // *b artinya mengakses isi dari reference, dalam hal ini, isi dari a // karena mutable reference, maka isi a bisa diganti let mut a = 90; { let b = &mut a; // a dipinjam di sini *b += 9; // isi a diakses di sini } // peminjaman a berakhir di sini println!("a bernilai {}", a); let pemain_milan = Pemain::new("Andre Silva"); let jumlah_gol = pemain_milan.get_gol(); println!("Jumlah gol Andre Silva {}", jumlah_gol); } fn calc(x: i32) -> i32 { let y; match x { 1...40 => y = 34, _ => y = 2, } y } fn tambah_pemain(nama_: &str, umur_: i32, gol_: i32) -> Pemain { let pemain_saya = Pemain { nama: nama_.to_string(), umur: umur_, gol: gol_, }; pemain_saya } fn tambah_para_pemain() -> Vec<Pemain> { let ronaldo = tambah_pemain("Ronaldo", 31, 510); let bacca = tambah_pemain("Bacca", 31, 235); let payet = tambah_pemain("Payet", 28, 150); let mut pemain_favorit = Vec::new(); pemain_favorit.push(ronaldo); pemain_favorit.push(bacca); pemain_favorit.push(payet); pemain_favorit }
download_success: true
blob_id: 5fa21c955021f62daeb826640b6400d6c2500574
language: Rust
repo_name: sampersand/qutie
path: /src/obj/objects/null.rs
src_encoding: UTF-8
length_bytes: 1,564
score: 2.828125
int_score: 3
detected_licenses: []
license_type: no_license
text:
use obj::objects::object::{Object, ObjType}; use obj::objects::boolean::{Boolean, FALSE}; pub struct Null {} pub const NULL: Null = Null{}; impl Null { #[inline] pub fn get() -> Null { NULL } pub fn to_string(&self) -> String { "null".to_string() } } use std; impl_defaults!(Debug; Null, "Null"); impl_defaults!(Display; to_string; Null); use obj::traits::operators::QtEql; use std::rc::Rc; use obj::result::{BoolResult, ObjError}; impl QtEql for Null { fn qt_eql(&self, other: &Rc<Object>) -> BoolResult { Ok(Boolean::get(other.is_a(ObjType::Null)).to_rc()) } } use obj::traits::conversion::{ToBoolean, ToText}; use obj::objects::text::Text; impl ToText for Null { fn to_text(&self) -> Result<Rc<Text>, ObjError> { Ok(Text::from(self.to_string()).to_rc()) } } impl ToBoolean for Null { fn to_boolean(&self) -> Result<Rc<Boolean>, ObjError> { Ok(FALSE.to_rc()) } } impl_defaults!(ToRc; Null); impl_defaults!(Object; Null); impl_traits!(data=GetItem, Null); impl_traits!(data=SetItem, Null); impl_traits!(data=DelItem, Null); impl_traits!(operators=QtAdd, Null); impl_traits!(operators=QtSub, Null); impl_traits!(operators=QtMul, Null); impl_traits!(operators=QtDiv, Null); impl_traits!(operators=QtMod, Null); impl_traits!(operators=QtPow, Null); impl_traits!(operators=QtNeq, Null); impl_traits!(operators=QtLth, Null); impl_traits!(operators=QtGth, Null); impl_traits!(operators=QtLeq, Null); impl_traits!(operators=QtGeq, Null); impl_traits!(misc=QtCall, Null);
download_success: true
blob_id: 75ef22efe4aa71bf152891bad8c4955aeff07d69
language: Rust
repo_name: Orangenosecom/philipshue
path: /examples/recall_scene.rs
src_encoding: UTF-8
length_bytes: 915
score: 2.5625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
extern crate philipshue; use std::env; use std::num::ParseIntError; use philipshue::bridge::Bridge; mod discover; use discover::discover; fn main() { match run() { Ok(()) => (), Err(_) => println!("Invalid number!"), } } fn run() -> Result<(), ParseIntError> { let args: Vec<String> = env::args().collect(); if args.len() < 4 { println!("Usage: {} <username> <group_id> <scene_id>", args[0]); return Ok(()); } let bridge = Bridge::new(discover().pop().unwrap(), &*args[1]); let group_id: usize = args[2].parse()?; let scene = &*args[3]; match bridge.recall_scene_in_group(group_id, scene) { Ok(resps) => { for resp in resps.into_iter() { println!("{:?}", resp) } } Err(e) => println!("Error occured when trying to send request:\n\t{}", e), } Ok(()) }
download_success: true
blob_id: e05b8cd653648f4baac3718857ac2359b32ea131
language: Rust
repo_name: akiles/embassy
path: /embassy-usb/src/descriptor_reader.rs
src_encoding: UTF-8
length_bytes: 3,112
score: 2.828125
int_score: 3
detected_licenses: [ "MIT", "Apache-2.0" ]
license_type: permissive
text:
use crate::descriptor::descriptor_type; use crate::driver::EndpointAddress; use crate::types::InterfaceNumber; #[derive(Copy, Clone, PartialEq, Eq, Debug)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] pub struct ReadError; pub struct Reader<'a> { data: &'a [u8], } impl<'a> Reader<'a> { pub fn new(data: &'a [u8]) -> Self { Self { data } } pub fn eof(&self) -> bool { self.data.is_empty() } pub fn read<const N: usize>(&mut self) -> Result<[u8; N], ReadError> { let n = self.data.get(0..N).ok_or(ReadError)?; self.data = &self.data[N..]; Ok(n.try_into().unwrap()) } pub fn read_u8(&mut self) -> Result<u8, ReadError> { Ok(u8::from_le_bytes(self.read()?)) } pub fn read_u16(&mut self) -> Result<u16, ReadError> { Ok(u16::from_le_bytes(self.read()?)) } pub fn read_slice(&mut self, len: usize) -> Result<&'a [u8], ReadError> { let res = self.data.get(0..len).ok_or(ReadError)?; self.data = &self.data[len..]; Ok(res) } pub fn read_descriptors(&mut self) -> DescriptorIter<'_, 'a> { DescriptorIter { r: self } } } pub struct DescriptorIter<'a, 'b> { r: &'a mut Reader<'b>, } impl<'a, 'b> Iterator for DescriptorIter<'a, 'b> { type Item = Result<(u8, Reader<'a>), ReadError>; fn next(&mut self) -> Option<Self::Item> { if self.r.eof() { return None; } let len = match self.r.read_u8() { Ok(x) => x, Err(e) => return Some(Err(e)), }; let type_ = match self.r.read_u8() { Ok(x) => x, Err(e) => return Some(Err(e)), }; let data = match self.r.read_slice(len as usize - 2) { Ok(x) => x, Err(e) => return Some(Err(e)), }; Some(Ok((type_, Reader::new(data)))) } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] pub struct EndpointInfo { pub configuration: u8, pub interface: InterfaceNumber, pub interface_alt: u8, pub ep_address: EndpointAddress, } pub fn foreach_endpoint(data: &[u8], mut f: impl FnMut(EndpointInfo)) -> Result<(), ReadError> { let mut ep = EndpointInfo { configuration: 0, interface: InterfaceNumber(0), interface_alt: 0, ep_address: EndpointAddress::from(0), }; for res in Reader::new(data).read_descriptors() { let (kind, mut r) = res?; match kind { descriptor_type::CONFIGURATION => { let _total_length = r.read_u16()?; let _total_length = r.read_u8()?; ep.configuration = r.read_u8()?; } descriptor_type::INTERFACE => { ep.interface = InterfaceNumber(r.read_u8()?); ep.interface_alt = r.read_u8()?; } descriptor_type::ENDPOINT => { ep.ep_address = EndpointAddress::from(r.read_u8()?); f(ep) } _ => {} } } Ok(()) }
download_success: true
blob_id: 2f08df3bbb4a0bbd605d1fd3850003ae87e4a723
language: Rust
repo_name: drupalio/comprakt
path: /compiler-lib/src/firm_context.rs
src_encoding: UTF-8
length_bytes: 7,205
score: 2.578125
int_score: 3
detected_licenses: [ "Apache-2.0", "MIT" ]
license_type: permissive
text:
use crate::{ firm::{ runtime::{self, Runtime}, FirmProgram, Options, ProgramGenerator, }, strtab::StringTable, type_checking::{type_analysis::TypeAnalysis, type_system::TypeSystem}, }; use lazy_static::lazy_static; use libfirm_rs::{bindings, types::TyTrait}; use optimization; use std::{ ffi::{CStr, CString}, fs, path::{Path, PathBuf}, }; #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum FirmContextState { // Each of the following 3 can leapfrog, but not jump back Built, Dumped, Optimized, // Terminal 1 AsmEmitted, // Terminal 2 ExternalBackend, } /// `FirmContext` is a singleton that represents the global state of `libFIRM` /// library configuration and FIRM-graph construction. pub struct FirmContext<'src, 'ast> { state: FirmContextState, dump_dir: PathBuf, // outputs program: FirmProgram<'src, 'ast>, } use std::sync::atomic::{AtomicBool, Ordering::SeqCst}; lazy_static! { static ref FIRM_CONTEXT_INITIALIZED: AtomicBool = AtomicBool::new(false); } impl<'src, 'ast> FirmContext<'src, 'ast> { /// /// * Initialize libFIRM /// * Setup libFIRM target options in case libFirm is used for lowering /// (TODO can we move this /// to a libFIRM backend?) /// * Build a FIRM graph from the AST stored in `type_system`. /// /// The lowering / target configuration of libFIRM is stored in global /// storage. Hence, this function must be **called at most once**, and /// it will panic otherwise. pub fn build( dump_dir: &Path, type_system: &'src TypeSystem<'src, 'ast>, type_analysis: &'src TypeAnalysis<'src, 'ast>, strtab: &'src StringTable<'src>, rtlib: Box<dyn runtime::RTLib>, ) -> FirmContext<'src, 'ast> { let dump_dir = dump_dir.to_owned(); if !dump_dir.exists() { fs::create_dir_all(&dump_dir).expect("Failed to create output directory"); } // This block protects against concurrent use of libFIRM, which is necessary // because the backend-configuration is stored in globals inside libFIRM. let prev = FIRM_CONTEXT_INITIALIZED.compare_and_swap(false, true, SeqCst); if prev { panic!( "libFIRM lowering context initialized, concurrent or \ repeated use not supported due to library-internal state" ); } else { unsafe { libfirm_rs::init(); // this call panics on error let triple = bindings::ir_get_host_machine_triple(); bindings::ir_target_set_triple(triple); // pic=1 means 'generate position independent code' bindings::ir_target_option( CString::new("pic=1").expect("CString::new failed").as_ptr(), ); bindings::ir_target_init(); bindings::set_optimize(0); // manually verified that the char* is copied internally, // thus it's ok to drop CString right away let dump_dir_cstr = CString::new(dump_dir.to_str().unwrap().as_bytes()).unwrap(); bindings::ir_set_dump_path(dump_dir_cstr.as_ptr()); } } let runtime = std::rc::Rc::new(Runtime::new(rtlib)); let generator = ProgramGenerator::new(runtime, type_system, type_analysis, strtab); let program = generator.generate(); FirmContext { state: FirmContextState::Built, dump_dir, program, } } /// May panic or fail silently if dumping fails. /// Must only be called once. 
pub fn high_level_dump(&mut self, opts: &Options) { use self::FirmContextState::*; match self.state { Built => self.state = Dumped, Dumped | Optimized | AsmEmitted | ExternalBackend => { panic!("invalid state {:?}", self.state) } } if opts.dump_firm_graph { let suffix = CString::new("high-level").unwrap(); unsafe { bindings::dump_all_ir_graphs(suffix.as_ptr()) }; } if opts.dump_class_layouts { for class in self.program.classes.values() { let outpath = self .dump_dir .join(class.borrow().def.name.as_str()) .with_extension("layout") .to_str() .and_then(|s| CString::new(s).ok()) .unwrap(); let mode = CStr::from_bytes_with_nul(b"w\0").unwrap().as_ptr() as *mut i8; unsafe { let file = libc::fopen(outpath.as_ptr() as *mut i8, mode); #[allow(clippy::cast_ptr_alignment)] bindings::dump_type_to_file( file as *mut libfirm_rs::bindings::_IO_FILE, class.borrow().entity.ty().ir_type(), ); libc::fclose(file); } } } } /// Must only be called once. pub fn run_optimizations(&mut self, optimizations: optimization::Level) { use self::FirmContextState::*; match self.state { Built | Dumped => self.state = Optimized, Optimized | AsmEmitted | ExternalBackend => panic!("invalid state {:?}", self.state), } optimizations.run_all(&mut self.program); } /// Must only be called once. pub fn use_external_backend(&mut self) -> &FirmProgram<'src, 'ast> { use self::FirmContextState::*; match self.state { Built | Dumped | Optimized => self.state = ExternalBackend, ExternalBackend | AsmEmitted => panic!("invalid state {:?}", self.state), } &self.program } } use crate::backend; impl backend::AsmBackend for FirmContext<'_, '_> { /// This implementation of `emit_asm` may only be called once and will panic /// on subsequent calls. fn emit_asm(&mut self, out: &mut dyn backend::AsmOut) -> std::io::Result<()> { use self::FirmContextState::*; match self.state { Built | Dumped | Optimized => self.state = AsmEmitted, AsmEmitted | ExternalBackend => panic!("invalid state {:?}", self.state), } // this can only happen once, and is protected by the state guard above unsafe { bindings::lower_highlevel(); bindings::be_lower_for_target(); // TODO: real label let label = CStr::from_bytes_with_nul(b"<unknown>\0").unwrap().as_ptr(); // TODO libc and libFIRM error checks let fd = out.as_raw_fd(); let mode = CStr::from_bytes_with_nul(b"w\0").unwrap().as_ptr(); let assembly_file = libc::fdopen(fd, mode); #[allow(clippy::cast_ptr_alignment)] bindings::be_main(assembly_file as *mut bindings::_IO_FILE, label); // not close, since we used fdopen (will be closed by creator of `out`) libc::fflush(assembly_file); } Ok(()) } }
download_success: true
blob_id: cc0dc8d0859474bb5b14b062fc8837e151a927ec
language: Rust
repo_name: dpc/rust-extfsm
path: /src/lib.rs
src_encoding: UTF-8
length_bytes: 35,883
score: 3.125
int_score: 3
detected_licenses: [ "Apache-2.0", "LicenseRef-scancode-warranty-disclaimer" ]
license_type: permissive
text:
//! Implementation of a generic final state machine with //! extended state. Features worth mentioning: //! //! * optional exit/enter transitions on states //! * each event instance can provide boxed arguments to transiton closure //! * each transition closure can return with vector of arguments that //! are queued at the end of outstanding events queue //! * can generate dot represenation of itself //! //! # Author //! Tony Przygienda, 2016 //! //! # Examples //! Check out the tests in the implementation for a good example of use //! //! # Panics //! Never //! //! # Errors //! refer to `Errors` //! //! # Copyrights //! //! Copyright (c) 2017, Juniper Networks, Inc. //! All rights reserved. //! //! Licensed under the Apache License, Version 2.0 (the "License"); //! you may not use this file except in compliance with the License. //! This code is not an official Juniper product. //! You may obtain a copy of the License at //! //! http://www.apache.org/licenses/LICENSE-2.0 //! //! Unless required by applicable law or agreed to in writing, software //! distributed under the License is distributed on an "AS IS" BASIS, //! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //! See the License for the specific language governing permissions and //! limitations under the License. #[macro_use] extern crate slog; extern crate dot; extern crate uuid; #[macro_use] extern crate custom_derive; #[macro_use] extern crate enum_derive; use std::collections::{HashMap, VecDeque}; use std::cell::{RefMut, RefCell, Ref}; use std::hash::Hash; use std::fmt::Debug; use std::iter::Iterator; use slog::Logger; use std::default::Default; use std::io; use std::fs; use uuid::Uuid; /// types of transitions on states #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub enum EntryExit { EntryTransition, ExitTransition, } #[derive(Debug, Clone, PartialEq, Eq)] /// Errors that can occur when running FSMs pub enum Errors<EventType, StateType, ErrorType> { OK, /// internal error at a given place that can be generated by transition implementation InternalError(EventType, StateType, ErrorType), /// the requested transition does not exist, FSM needs to be shut down NoTransition(EventType, StateType), /// transition failed, you have to shut down the FSM TransitionFailure, } /// type representing an optional argument to a transition function call pub type OptionalFnArg<TransitionFnArguments> = Option<Box<TransitionFnArguments>>; /// set of events to execute with according optional argument on call of transition function pub type EventQueue<EventType, TransitionFnArguments> = VecDeque<(EventType, OptionalFnArg<TransitionFnArguments>)>; /// type to be returned by all transitions /// an optional queue of events to be added to the FSM or an error is returned pub type TransitionResult<EventType, StateType, TransitionFnArguments, ErrorType> = Result<Option<EventQueue<EventType, TransitionFnArguments>>, Errors<EventType, StateType, ErrorType>>; /// transition function used, takes optional argument and returns either with error /// or an optional set of events to be added to processing (at the end of event queue) pub type TransitionFn<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType> = Fn(RefMut<Box<ExtendedState>>, EventType, OptionalFnArg<TransitionFnArguments>) -> TransitionResult<EventType, StateType, TransitionFnArguments, ErrorType>; /// transition function to either enter or exit a specific state, return same as /// `FSMTransitionFn` pub type EntryExitTransitionFn<ExtendedState, EventType, 
StateType, TransitionFnArguments, ErrorType> = Fn(RefMut<Box<ExtendedState>>) -> TransitionResult<EventType, StateType, TransitionFnArguments, ErrorType>; /// *Final state machine type* /// /// # Template parameters /// /// * `ExtendedState` - provides a structure that every transition can access and /// stores extended state /// * `TransitionFnArguments` - type that can be boxed as parameters to an event instance /// * `ErrorType` - Errors that transitions can generate internally pub struct FSM<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> where StateType: Clone + Eq + Hash + Sized, EventType: Clone + Eq + Hash + Sized { name: String, pub extended_state: RefCell<Box<ExtendedState>>, current_state: StateType, event_queue: EventQueue<EventType, TransitionFnArguments>, transitions: TransitionTable<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType>, statetransitions: EntryExitTransitionTable<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType>, log: Logger, /// dotgraph structure for output dotgraph: DotGraph<StateType, EventType>, } #[derive(Clone, PartialEq, Eq, Hash)] enum DotEdgeKey<StateType, EventType> where StateType: Clone + Sized + Eq + Hash, EventType: Clone + Sized + Eq + Hash { Transition(TransitionSource<StateType, EventType>), EntryExit(EntryExitKey<StateType>), } /// internal edge to generate DOT graphical view #[derive(Clone, PartialEq, Eq)] struct DotEdge<StateType, EventType> where StateType: Clone + Sized + Eq + Hash, EventType: Clone + Sized + Eq + Hash { key: DotEdgeKey<StateType, EventType>, style: dot::Style, label: String, } #[derive(Clone, PartialEq, Eq, Hash)] struct DotNodeKey<StateType: Clone + Sized + Eq + Hash>(Option<EntryExit>, StateType); /// internal node to generate DOT graphical view #[derive(Clone, PartialEq, Eq)] struct DotNode<StateType> where StateType: Clone + Sized + Eq + Hash { /// None for EntryExit signifies node used for normal transitions /// otherwise it's a "shadow node" that does not show up but can be used for /// entry-exit transition annotation key: DotNodeKey<StateType>, id: Uuid, shape: Option<String>, style: dot::Style, label: &'static str, } /// graph containing the DOT equivalent of the FSM struct DotGraph<StateType, EventType> where StateType: Clone + Sized + Eq + Hash, EventType: Clone + Sized + Eq + Hash { nodes: HashMap<DotNodeKey<StateType>, DotNode<StateType>>, edges: HashMap<DotEdgeKey<StateType, EventType>, DotEdge<StateType, EventType>>, id: Uuid, /// starting state of FSM start_state: Option<StateType>, } impl<StateType, EventType> Default for DotGraph<StateType, EventType> where StateType: Clone + Sized + Eq + Hash, EventType: Clone + Sized + Eq + Hash { fn default() -> DotGraph<StateType, EventType> { DotGraph { nodes: HashMap::new(), edges: HashMap::new(), id: Uuid::new_v4(), start_state: None } } } /// graphwalk impl<'a, ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> dot::GraphWalk<'a, DotNodeKey<StateType>, DotEdgeKey<StateType, EventType>> for FSM<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> where StateType: Clone + PartialEq + Eq + Hash + Sized, EventType: Clone + PartialEq + Eq + Hash + Sized, { fn nodes(&'a self) -> dot::Nodes<'a, DotNodeKey<StateType>> { self.dotgraph.nodes.keys().cloned().collect() } fn edges(&'a self) -> dot::Edges<'a, DotEdgeKey<StateType, EventType>> { self.dotgraph.edges.keys().cloned().collect() } fn source(&self, e: &DotEdgeKey<StateType, EventType>) -> DotNodeKey<StateType> { 
match e { &DotEdgeKey::EntryExit(ref eek) => { if eek.1 == EntryExit::EntryTransition { DotNodeKey(Some(eek.1.clone()), eek.0.clone()) } else { if let Some(_) = self.statetransitions.get(eek) { DotNodeKey(None, eek.0.clone()) } else { unreachable!(); } } } &DotEdgeKey::Transition(ref tk) => { DotNodeKey(None, tk.state.clone()) } } } fn target(&self, e: &DotEdgeKey<StateType, EventType>) -> DotNodeKey<StateType> { // target more tricky, we have to lookup the real table match e { &DotEdgeKey::EntryExit(ref eek) => { if eek.1 == EntryExit::ExitTransition { DotNodeKey(Some(eek.1.clone()), eek.0.clone()) } else { if let Some(_) = self.statetransitions.get(eek) { DotNodeKey(None, eek.0.clone()) } else { unreachable!(); } } } &DotEdgeKey::Transition(ref tk) => { if let Some(dn) = self.transitions.get(tk) { DotNodeKey(None, dn.endstate.clone()) } else { unreachable!(); } } } } } /// graph labelling impl<'a, ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> dot::Labeller<'a, DotNodeKey<StateType>, DotEdgeKey<StateType, EventType>> for FSM<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> where StateType: Clone + PartialEq + Eq + Hash + Sized, EventType: Clone + PartialEq + Eq + Hash + Sized, { fn graph_id(&'a self) -> dot::Id<'a> { let gid = format!("G{}", self.dotgraph.id.simple()); dot::Id::new(gid).unwrap() } fn node_id(&'a self, n: &DotNodeKey<StateType>) -> dot::Id<'a> { /// get the node match self.dotgraph.nodes.get(n) { Some(realnode) => { let fid = format!("N{}", realnode.id.simple()); dot::Id::new(fid).unwrap() } None => unreachable!(), } } fn node_shape(&'a self, n: &DotNodeKey<StateType>) -> Option<dot::LabelText<'a>> { match self.dotgraph.nodes.get(n) { Some(realnode) => { if let Some(ref r) = realnode.shape { Some(dot::LabelText::LabelStr(r.as_str().into())) } else { Some(dot::LabelText::LabelStr("oval".into())) } } None => unreachable!(), } } fn node_style(&'a self, n: &DotNodeKey<StateType>) -> dot::Style { match self.dotgraph.nodes.get(n) { Some(realnode) => { realnode.style } None => unreachable!(), } } fn edge_end_arrow(&'a self, _e: &DotEdgeKey<StateType, EventType>) -> dot::Arrow { dot::Arrow::normal() } fn edge_start_arrow(&'a self, _e: &DotEdgeKey<StateType, EventType>) -> dot::Arrow { dot::Arrow::none() } fn edge_style(&'a self, _e: &DotEdgeKey<StateType, EventType>) -> dot::Style { dot::Style::None } fn node_label<'b>(&'b self, n: &DotNodeKey<StateType>) -> dot::LabelText<'b> { match self.dotgraph.nodes.get(n) { Some(realnode) => { dot::LabelText::LabelStr(realnode.label.into()) } None => unreachable!(), } } fn edge_label<'b>(&'b self, ek: &DotEdgeKey<StateType, EventType>) -> dot::LabelText<'b> { match self.dotgraph.edges.get(ek) { Some(realedge) => { dot::LabelText::LabelStr(realedge.label.clone().into()) } None => unreachable!(), } } } /// trait that can process events from a queue using a transition table pub trait RunsFSM<EventType, StateType, TransitionFnArguments, ErrorType> { /// add events to the event queue @ the back, events are _not_ processed fn add_events(&mut self, events: &mut Vec<(EventType, OptionalFnArg<TransitionFnArguments>)>) -> Result<u32, Errors<EventType, StateType, ErrorType>>; /// process the whole event queue. Observe that this can generate multiple messages /// and queue events against the FSM itself again so don't rely which state the machine ends /// up in /// /// `returns` - number of events processed or errors encountered. 
/// On errors not much can be done /// except killing the FSM instance fn process_event_queue(&mut self) -> Result<u32, Errors<EventType, StateType, ErrorType>>; } /// implementation of methods to contstruct the machine impl<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> FSM<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> where StateType: Clone + Eq + Hash + Sized, EventType: Clone + Eq + Hash + Sized, { /// new FSM with an initial extended state box'ed up so it can be passed around easily pub fn new(start_state: StateType, extended_init: Box<ExtendedState>, name: &str, log: Logger) -> FSM<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> { let mut g = DotGraph::default(); g.start_state = Some(start_state.clone()); FSM { log: log, name: String::from(name), current_state: start_state, event_queue: VecDeque::<(EventType, OptionalFnArg<TransitionFnArguments>)>::new(), transitions: TransitionTable::new(), statetransitions: EntryExitTransitionTable::new(), extended_state: RefCell::new(extended_init), dotgraph: g, } } /// provides output of the FSM in dot format /// /// * `filename` - optional filename pub fn dotfile(&mut self, filename: Option<String>, state2name: &HashMap<StateType, &'static str>, event2name: &HashMap<EventType, &'static str>, ) -> Result<(), io::Error> { let fileattempt = if let Some(fname) = filename { fs::File::create(fname).map(|f| Some(f)) } else { Ok(None) }; if let Ok(maybef) = fileattempt { let sout = io::stdout(); let sv = state2name.keys().cloned().collect::<Vec<_>>(); // generate the graph, nodes first for n in sv.iter() { // first _real_ nodes, i.e. not entry/exit let key = DotNodeKey(None, n.clone()); let shape = if let Some(ref sn) = self.dotgraph.start_state { if sn == n { Some(String::from("diamond")) } else { None } } else { None }; self.dotgraph.nodes.insert(key.clone(), DotNode { key: key, id: Uuid::new_v4(), shape: shape, style: dot::Style::None, label: state2name.get(n).unwrap_or(&"?"), } ); // now, let's generate pseudo nodes if necessary with entry, exit with // invisible shapes for t in [EntryExit::EntryTransition, EntryExit::ExitTransition] .into_iter() .collect::<Vec<_>>() .into_iter() { let eek = (n.clone(), t.clone()); match self.statetransitions.get(&eek) { None => {} Some(_) => { let label = match t { &EntryExit::EntryTransition => { "Enter".into() } &EntryExit::ExitTransition => { "Exit".into() } }; let key = DotNodeKey(Some(t.clone()), n.clone()); self.dotgraph.nodes.insert(key.clone(), DotNode { key: key, id: Uuid::new_v4(), shape: Some(String::from("plain")), style: dot::Style::Dashed, label: label, }); } } } } // generate the edges now & label them for t in self.transitions.iter() { let (tk, tv) = t; let key = DotEdgeKey::Transition(TransitionSource::new(tk.state.clone(), tk.event.clone())); self.dotgraph.edges.insert(key.clone(), DotEdge { key: key, style: dot::Style::None, label: format!("{}\n|{}|", tv.name.clone() .unwrap_or(String::from("")), event2name.get(&tk.event) .unwrap_or(&"")) } ); } for t in self.statetransitions.iter() { let (tk, tv) = t; let key: DotEdgeKey<StateType, EventType> = DotEdgeKey::EntryExit((tk.0.clone(), tk.1.clone())); self.dotgraph.edges.insert(key.clone(), DotEdge { key: key, style: dot::Style::None, label: format!("{}", tv.1.clone().unwrap_or( String::from(""))) }); } let render = move |mut mf, mut sout| { match &mut mf { &mut Some(ref mut f) => dot::render(self, f), _ => dot::render(self, &mut sout) // as io::Write } }; render(maybef, sout) } 
else { Err(fileattempt.err().unwrap()) // error } } /// new transition /// /// `returns` - TRUE if transition has been inserted, /// FALSE if a previous has been overwritten! pub fn add_transition(&mut self, from: TransitionSource<StateType, EventType>, to: TransitionTarget<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType>) -> bool { self.transitions.insert(from, to).is_none() } /// new enter/exit transition per state /// executed _after_ the transition right before /// the state is entered. If the machine remains in the same state /// neither the enter nor the exit transitions are called /// /// `returns` - TRUE if transition has been inserted, FALSE if a /// previous has been overwritten! pub fn add_enter_transition(&mut self, case: (StateType, EntryExit), trans: Box<EntryExitTransitionFn<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType>>, name: Option<&str>) -> bool { self.statetransitions.insert(case, (trans, name.map(|s| String::from(s)))).is_none() } pub fn name(&self) -> &String { &self.name } /// gives a read only peek into the extended state from the outside of transitions. /// Must be given up before running machine of course pub fn extended_state(&self) -> Ref<Box<ExtendedState>> { self.extended_state.borrow() } /// check current state read-only pub fn current_state(&self) -> StateType { self.current_state.clone() } /// `returns` - TRUE if machine has outstanding events queued to process pub fn events_pending(&self) -> bool { self.event_queue.len() > 0 } } /// describes a transition origination point #[derive(Hash, Eq, PartialEq, Clone)] pub struct TransitionSource<StateType, EventType> { state: StateType, event: EventType } impl<StateType, EventType> TransitionSource<StateType, EventType> { /// create a transition source /// * `state` - original state /// * `event` - event occuring pub fn new(state: StateType, event: EventType) -> TransitionSource<StateType, EventType> { TransitionSource { state: state, event: event, } } } type EntryExitKey<StateType> = (StateType, EntryExit); /// implements the target of a transition upon an event pub struct TransitionTarget<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> { endstate: StateType, transfn: Box<TransitionFn<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType>>, name: Option<String>, } impl<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> TransitionTarget<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> { /// create a transition target /// * `endstate` - state resulting after correct transition /// * `transfn` - transition as a boxed function taking in extended state, /// event and possible arguments /// * `name` - optional transition name, helpful if `transfn` is a closure pub fn new(endstate: StateType, transfn: Box<TransitionFn<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType>>, name: Option<&str>) -> TransitionTarget <ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> { TransitionTarget { endstate: endstate, transfn: transfn, name: name.map(|s| String::from(s)) } } } /// map of from state/event to end state/transition type TransitionTable<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> = HashMap<// from TransitionSource<StateType, EventType>, TransitionTarget<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType>>; /// map for state entry/exit transitions type EntryExitTransitionTable<ExtendedState, StateType, EventType, 
TransitionFnArguments, ErrorType> = HashMap<// from EntryExitKey<StateType>, // transition (Box<EntryExitTransitionFn<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType>>, Option<String>)>; impl<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType> RunsFSM<EventType, StateType, TransitionFnArguments, ErrorType> for FSM<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType> where StateType: Clone + PartialEq + Eq + Hash + Debug + Sized, EventType: Clone + PartialEq + Eq + Hash + Debug + Sized, ErrorType: Debug { fn add_events(&mut self, events: &mut Vec<(EventType, OptionalFnArg<TransitionFnArguments>)>) -> Result<u32, Errors<EventType, StateType, ErrorType>> { let el = events.len(); debug!(self.log, "FSM {} adding {} events", self.name, el); // move the queue into the closure and add events events.drain(..).map(move |e| { self.event_queue.push_back(e); }).last(); Ok(el as u32) } fn process_event_queue(&mut self) -> Result<u32, Errors<EventType, StateType, ErrorType>> { // need to recopy since we will be adding new events on transition possibly // so current events need to be frozen let mut evs = self.event_queue.drain(..).collect::<Vec<_>>(); let nrev = evs.len() as u32; let mut lr: Vec<Errors<EventType, StateType, ErrorType>> = evs.drain(..).map(|e| { let state = self.current_state.clone(); let event = e.0.clone(); let trans = self.transitions.get(&TransitionSource::new(state.clone(), event.clone())); let ref mut q = self.event_queue; let name = &self.name; debug!(self.log, "FSM {} processing event {:?}/{:?}", name, event, state); // play the entry, exit transition draining the event queues if necessary fn entryexit<ExtendedState, EventType, StateType, TransitionFnArguments, ErrorType>( log: &Logger, extstate: RefMut<Box<ExtendedState>>, name: &str, s: StateType, dir: EntryExit, q: &mut EventQueue<EventType, TransitionFnArguments>, trans: &EntryExitTransitionTable<ExtendedState, StateType, EventType, TransitionFnArguments, ErrorType>) -> Errors<EventType, StateType, ErrorType> where StateType: Clone + PartialEq + Eq + Hash + Debug, EventType: Clone + PartialEq + Eq + Hash + Debug, ErrorType: Debug { match trans.get(&(s.clone(), dir)) { None => Errors::OK, Some(ref tuple) => { let ref func = tuple.0; let ref tname = tuple.1; debug!(log, "FSM {} exit/entry state transition for {:?} {:?}", name, s, tname); match func(extstate) { Err(v) => v, Ok(ref mut v) => { match v { &mut Some(ref mut eventset) => { eventset.drain(..).map( |e| q.push_back(e) ).last(); Errors::OK } &mut None => Errors::OK, } } } } } } match trans { Some(itrans) => { let endstate = itrans.endstate.clone(); let transfn = &itrans.transfn; let mut res = Errors::OK; res = if state == endstate.clone() { res } else { // run exit for state let extstate = self.extended_state.borrow_mut(); entryexit(&self.log, extstate, name, state.clone(), EntryExit::ExitTransition, q, &self.statetransitions) }; // only continue if exit was ok res = match res { Errors::OK => { let extstate = self.extended_state.borrow_mut(); // match ref mutably the resulting event set of the transition and // drain it into our queue back match transfn(extstate, e.0, e.1) { Err(v) => v, Ok(v) => { match v { None => {} Some(eventset) => { q.extend(eventset) } } debug!(self.log, "FSM {} moving machine to {:?}", name, endstate); self.current_state = endstate.clone(); Errors::OK } } } r => r, }; // see whether we have entry into the next one match res { Errors::OK => { if state == endstate.clone() { res } else { let 
extstate = self.extended_state.borrow_mut(); entryexit(&self.log, extstate, name, endstate.clone(), EntryExit::EntryTransition, q, &self.statetransitions) } } r => r, } } None => Errors::NoTransition(event, state), } // check for any errors in the whole transitions of the queue }).filter(|e| { match *e { Errors::OK => false, _ => true } }).take(1).collect::<Vec<_>>(); // try to get first error out if any // check whether we got any errors on transitions match lr.pop() { Some(x) => { debug!(self.log, "FSM {} filter on transition failures yields {:?}", self.name, &x); Err(x) } _ => Ok(nrev) } } } #[cfg(test)] mod tests { //! small test of a coin machine opening/closing and checking coins //! it does check event generation in the transition, extended state, //! transitions on state enter/exit and error returns extern crate slog; extern crate slog_term; extern crate slog_atomic; extern crate slog_async; use std::collections::HashMap; use std::hash::Hash; use std::cell::RefMut; use slog::*; use self::slog_atomic::*; use super::{FSM, Errors, RunsFSM, EntryExit, TransitionTarget, TransitionSource}; use std::borrow::Borrow; use std; #[derive(Debug, Clone)] enum StillCoinType { Good, Bad, } #[derive(Debug, Clone)] enum StillArguments { Coin(StillCoinType), } custom_derive! { #[derive(IterVariants(StateVariants), IterVariantNames(StateNames), Debug, Clone, Hash, Eq, PartialEq)] enum StillStates { ClosedWaitForMoney, CheckingMoney, OpenWaitForTimeOut, } } custom_derive! { #[derive(IterVariants(EventVariants), IterVariantNames(EventNames), Debug, Clone, Hash, Eq, PartialEq)] enum StillEvents { GotCoin, // needs coin type AcceptMoney, RejectMoney, Timeout, } } #[derive(Debug)] enum StillErrors { CoinArgumentMissing, } struct StillExtState { coincounter: u32, opened: u32, closed: u32, } type CoinStillFSM = FSM<StillExtState, StillStates, StillEvents, StillArguments, StillErrors>; fn build_fsm() -> CoinStillFSM { let decorator = slog_term::PlainDecorator::new(std::io::stdout()); let drain = slog_term::CompactFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let drain = AtomicSwitch::new(drain); // Get a root logger that will log into a given drain. 
let mainlog = Logger::root(LevelFilter::new(drain, Level::Info).fuse(), o!("version" => env!("CARGO_PKG_VERSION"),)); let mut still_fsm = FSM::<StillExtState, StillStates, StillEvents, StillArguments, StillErrors>::new(StillStates::ClosedWaitForMoney, Box::new(StillExtState { coincounter: 0, opened: 0, closed: 0, }), "coin_still", mainlog); let check_money = move |_extstate: RefMut<Box<StillExtState>>, _ev: StillEvents, arg: Option<Box<StillArguments>>| { match arg { None => { Err(Errors::InternalError(StillEvents::GotCoin, StillStates::ClosedWaitForMoney, StillErrors::CoinArgumentMissing)) } Some(arg) => { match (*arg).borrow() { &StillArguments::Coin(ref t) => { match t { &StillCoinType::Good => { Ok(Some(vec![(StillEvents::AcceptMoney, None)] .into_iter() .collect())) } &StillCoinType::Bad => { Ok(Some(vec![(StillEvents::RejectMoney, None)] .into_iter() .collect())) } } } } } } }; still_fsm.add_transition(TransitionSource::new(StillStates::ClosedWaitForMoney, StillEvents::GotCoin), TransitionTarget::new(StillStates::CheckingMoney, Box::new(check_money), Some("ProcessCoin"))); still_fsm.add_transition(TransitionSource::new(StillStates::CheckingMoney, StillEvents::RejectMoney), TransitionTarget::new(StillStates::ClosedWaitForMoney, Box::new(|_, _, _| Ok(None)), Some("Rejected"))); still_fsm.add_transition(TransitionSource::new(StillStates::CheckingMoney, StillEvents::GotCoin), TransitionTarget::new(StillStates::CheckingMoney, Box::new(|_, _, _| Ok(None)), Some("IgnoreAnotherCoin"))); still_fsm.add_transition(TransitionSource::new(StillStates::CheckingMoney, StillEvents::AcceptMoney), TransitionTarget::new(StillStates::OpenWaitForTimeOut, Box::new(|ref mut estate, _, _| { estate.coincounter += 1; // we count open/close on entry/exit Ok(None) }), Some("Accepted"))); still_fsm.add_transition(TransitionSource::new(StillStates::OpenWaitForTimeOut, StillEvents::GotCoin), TransitionTarget::new(StillStates::OpenWaitForTimeOut, Box::new(|_, _, _| { Ok(Some(vec![(StillEvents::RejectMoney, None)] .into_iter() .collect())) }), Some("Reject"))); still_fsm.add_transition(TransitionSource::new(StillStates::OpenWaitForTimeOut, StillEvents::RejectMoney), TransitionTarget::new(StillStates::OpenWaitForTimeOut, Box::new(|_, _, _| Ok(None)), Some("Rejected"))); still_fsm.add_transition(TransitionSource::new(StillStates::OpenWaitForTimeOut, StillEvents::Timeout), TransitionTarget::new(StillStates::ClosedWaitForMoney, Box::new(|_, _, _| Ok(None)), Some("TimeOut"))); still_fsm.add_enter_transition((StillStates::OpenWaitForTimeOut, EntryExit::EntryTransition), Box::new(|ref mut estate| { estate.opened += 1; Ok(None) }), Some("CountOpens")); still_fsm.add_enter_transition((StillStates::OpenWaitForTimeOut, EntryExit::ExitTransition), Box::new(|ref mut estate| { estate.closed += 1; Ok(None) }), Some("CountClose")); still_fsm } #[test] fn coin_machine_test() { let mut still_fsm = build_fsm(); // timeout should give no transition error assert!(still_fsm.add_events(&mut vec![(StillEvents::Timeout, None)]).unwrap() == 1); match still_fsm.process_event_queue() { Ok(v) => panic!(format!("failed with {:?} # processed tokens as Ok(_)", v)), Err(v) => { match v { Errors::NoTransition(StillEvents::Timeout, StillStates::ClosedWaitForMoney) => { () } _ => panic!("failed with wrong FSM error"), } } } // that's how we package arguments, we need to clone the coins then let goodcoin = Box::new(StillArguments::Coin(StillCoinType::Good)); let badcoin = Box::new(StillArguments::Coin(StillCoinType::Bad)); let mut still_fsm = 
build_fsm(); assert!(still_fsm.add_events(&mut vec![(StillEvents::GotCoin, Some(goodcoin.clone())), (StillEvents::GotCoin, Some(badcoin.clone())), (StillEvents::GotCoin, Some(goodcoin.clone())), (StillEvents::GotCoin, Some(goodcoin.clone()))]) .unwrap() == 4); while still_fsm.events_pending() { assert!(!still_fsm.process_event_queue().is_err()); } assert!(still_fsm.current_state() == StillStates::OpenWaitForTimeOut); assert!(still_fsm.add_events(&mut vec![(StillEvents::Timeout, None), ]).unwrap() == 1); while still_fsm.events_pending() { assert!(!still_fsm.process_event_queue().is_err()); } assert!(still_fsm.current_state() == StillStates::ClosedWaitForMoney); let es = still_fsm.extended_state(); assert!(es.borrow().coincounter == 1); assert!(es.borrow().opened == 1); assert!(es.borrow().closed == 1); } fn zipit<ET>(i1: Box<Iterator<Item=ET>>, i2: Box<Iterator<Item=&'static str>>) -> HashMap<ET, &'static str> where ET: Sized + Eq + Hash { i1.zip(i2).collect::<HashMap<_, _>>() } #[test] fn coin_machine_dot() { let mut still_fsm = build_fsm(); still_fsm.dotfile(None, &zipit(Box::new(StillStates::iter_variants()), Box::new(StillStates::iter_variant_names())), &zipit(Box::new(StillEvents::iter_variants()), Box::new(StillEvents::iter_variant_names()))) .expect("cannot dotfile"); still_fsm.dotfile(Some("target/tmp.dot".into()), &zipit(Box::new(StillStates::iter_variants()), Box::new(StillStates::iter_variant_names())), &zipit(Box::new(StillEvents::iter_variants()), Box::new(StillEvents::iter_variant_names()))) .expect("cannot dotfile"); } }
download_success: true
blob_id: 94b246a882da9e3cddd1661079d8becd5ae5d3c6
language: Rust
repo_name: houlei/QuantMath
path: /src/instruments/assets.rs
src_encoding: UTF-8
length_bytes: 12,382
score: 3.15625
int_score: 3
detected_licenses: [ "MIT" ]
license_type: permissive
text:
use std::rc::Rc; use std::fmt::Display; use std::fmt; use std::cmp::Ordering; use std::hash::Hash; use std::hash::Hasher; use instruments::Instrument; use instruments::Priceable; use instruments::PricingContext; use instruments::DependencyContext; use instruments::SpotRequirement; use dates::rules::DateRule; use dates::datetime::TimeOfDay; use dates::datetime::DateTime; use dates::datetime::DateDayFraction; use core::qm; /// Represents a currency. Generally currencies have a one-to-one mapping with /// world currencies. There is an exception in countries like Korea, which have /// distinct onshore and offshore currencies, due to tradeability restrictions. /// /// This currency always represents major units such as dollars or pounds, /// rather than minor units such as cents or pence. #[derive(Clone, Debug)] pub struct Currency { id: String, settlement: Rc<DateRule> } impl Currency { pub fn new(id: &str, settlement: Rc<DateRule>) -> Currency { Currency { id: id.to_string(), settlement: settlement } } } impl Instrument for Currency { fn id(&self) -> &str { &self.id } fn payoff_currency(&self) -> &Currency { self } fn credit_id(&self) -> &str { // for a currency, we always take its credit id as its own name &self.id } fn settlement(&self) -> &Rc<DateRule> { &self.settlement } fn dependencies(&self, context: &mut DependencyContext) -> SpotRequirement { dependence_on_spot_discount(self, context); // for a currency, the spot is always one (in units of its own currency) SpotRequirement::NotRequired } fn as_priceable(&self) -> Option<&Priceable> { Some(self) } } impl Display for Currency { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.id.fmt(f) } } impl Ord for Currency { fn cmp(&self, other: &Currency) -> Ordering { self.id.cmp(&other.id) } } impl PartialOrd for Currency { fn partial_cmp(&self, other: &Currency) -> Option<Ordering> { Some(self.cmp(other)) } } impl PartialEq for Currency { fn eq(&self, other: &Currency) -> bool { self.id == other.id } } impl Eq for Currency {} impl Hash for Currency { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); } } impl Priceable for Currency { fn as_instrument(&self) -> &Instrument { self } /// Currency is worth one currency unit, but only if we are discounting /// to the date which is when we would receive the currency. fn price(&self, context: &PricingContext) -> Result<f64, qm::Error> { discount_from_spot(self, context) } } /// Simple assets are worth the screen price, but only if the date we want /// to discount to is the same as the date when the spot price is paid. /// /// This method calculates the discount to apply to a spot price. pub fn discount_from_spot(instrument: &Instrument, context: &PricingContext) -> Result<f64, qm::Error> { match context.discount_date() { None => Ok(1.0), Some(discount_date) => { let spot_date = context.spot_date(); let pay_date = instrument.settlement().apply(spot_date); if discount_date == pay_date { Ok(1.0) } else { let yc = context.yield_curve(instrument.credit_id(), discount_date.max(pay_date))?; yc.df(pay_date, discount_date) } } } } pub fn dependence_on_spot_discount(instrument: &Instrument, context: &mut DependencyContext) { // We can assume that the pricing context will provide discounts // at least up to its own discount date, so we do not need to specify // this dependency let spot_date = context.spot_date(); let pay_date = instrument.settlement().apply(spot_date); context.yield_curve(instrument.credit_id(), pay_date); } /// Represents an equity single name or index. 
Can also be used to represent /// funds and ETFs, #[derive(Clone, Debug)] pub struct Equity { id: String, credit_id: String, currency: Rc<Currency>, settlement: Rc<DateRule> } impl Equity { pub fn new(id: &str, credit_id: &str,currency: Rc<Currency>, settlement: Rc<DateRule>) -> Equity { Equity { id: id.to_string(), credit_id: credit_id.to_string(), currency: currency, settlement: settlement } } } impl Instrument for Equity { fn id(&self) -> &str { &self.id } fn payoff_currency(&self) -> &Currency { &*self.currency } fn credit_id(&self) -> &str { &self.credit_id } fn settlement(&self) -> &Rc<DateRule> { &self.settlement } fn dependencies(&self, context: &mut DependencyContext) -> SpotRequirement { dependence_on_spot_discount(self, context); SpotRequirement::Required } fn time_to_day_fraction(&self, date_time: DateTime) -> Result<DateDayFraction, qm::Error> { // for now, we hard-code the conversion. Later we shall // allow this to be set per equity let day_fraction = match date_time.time_of_day() { TimeOfDay::Open => 0.0, TimeOfDay::EDSP => 0.0, TimeOfDay::Close => 0.8 }; Ok(DateDayFraction::new(date_time.date(), day_fraction)) } fn as_priceable(&self) -> Option<&Priceable> { Some(self) } } impl Display for Equity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.id.fmt(f) } } impl Ord for Equity { fn cmp(&self, other: &Equity) -> Ordering { self.id.cmp(&other.id) } } impl PartialOrd for Equity { fn partial_cmp(&self, other: &Equity) -> Option<Ordering> { Some(self.cmp(other)) } } impl PartialEq for Equity { fn eq(&self, other: &Equity) -> bool { self.id == other.id } } impl Eq for Equity {} impl Hash for Equity { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); } } impl Priceable for Equity { fn as_instrument(&self) -> &Instrument { self } /// The price of an equity is the current spot, but only if the date we /// are discounting to is the same as the spot would be paid. 
fn price(&self, context: &PricingContext) -> Result<f64, qm::Error> { let df = discount_from_spot(self, context)?; let spot = context.spot(&self.id)?; Ok(spot * df) } } /// Represents a credit entity #[derive(Clone, Debug)] pub struct CreditEntity { id: String, currency: Rc<Currency>, settlement: Rc<DateRule> } impl CreditEntity { pub fn new(id: &str, currency: Rc<Currency>, settlement: Rc<DateRule>) -> CreditEntity { CreditEntity { id: id.to_string(), currency: currency, settlement: settlement } } } impl Instrument for CreditEntity { fn id(&self) -> &str { &self.id } fn payoff_currency(&self) -> &Currency { &*self.currency } fn credit_id(&self) -> &str { // a credit entity's id is also its credit id &self.id } fn settlement(&self) -> &Rc<DateRule> { &self.settlement } fn dependencies(&self, context: &mut DependencyContext) -> SpotRequirement { dependence_on_spot_discount(self, context); // for a credit entity, the spot is always one SpotRequirement::NotRequired } fn as_priceable(&self) -> Option<&Priceable> { Some(self) } } impl Display for CreditEntity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.id.fmt(f) } } impl Ord for CreditEntity { fn cmp(&self, other: &CreditEntity) -> Ordering { self.id.cmp(&other.id) } } impl PartialOrd for CreditEntity { fn partial_cmp(&self, other: &CreditEntity) -> Option<Ordering> { Some(self.cmp(other)) } } impl PartialEq for CreditEntity { fn eq(&self, other: &CreditEntity) -> bool { self.id == other.id } } impl Eq for CreditEntity {} impl Hash for CreditEntity { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); } } impl Priceable for CreditEntity { fn as_instrument(&self) -> &Instrument { self } /// A credit entity is worth one currency unit, but only if we are /// discounting to the date which is when we would receive the currency. 
fn price(&self, context: &PricingContext) -> Result<f64, qm::Error> { discount_from_spot(self, context) } } #[cfg(test)] mod tests { use super::*; use math::numerics::approx_eq; use math::interpolation::Extrap; use data::forward::Forward; use data::volsurface::VolSurface; use data::curves::RateCurveAct365; use data::curves::RateCurve; use dates::calendar::WeekdayCalendar; use dates::rules::BusinessDays; use dates::Date; fn sample_currency(step: u32) -> Currency { let calendar = Rc::new(WeekdayCalendar::new()); let settlement = Rc::new(BusinessDays::new_step(calendar, step)); Currency::new("GBP", settlement) } fn sample_equity(currency: Rc<Currency>, step: u32) -> Equity { let calendar = Rc::new(WeekdayCalendar::new()); let settlement = Rc::new(BusinessDays::new_step(calendar, step)); Equity::new("BP.L", "LSE", currency, settlement) } struct SamplePricingContext { spot: f64 } impl PricingContext for SamplePricingContext { fn spot_date(&self) -> Date { Date::from_ymd(2018, 06, 01) } fn discount_date(&self) -> Option<Date> { Some(Date::from_ymd(2018, 06, 05)) } fn yield_curve(&self, _credit_id: &str, _high_water_mark: Date) -> Result<Rc<RateCurve>, qm::Error> { let d = Date::from_ymd(2018, 05, 30); let points = [(d, 0.05), (d + 14, 0.08), (d + 56, 0.09), (d + 112, 0.085), (d + 224, 0.082)]; let c = RateCurveAct365::new(d, &points, Extrap::Flat, Extrap::Flat)?; Ok(Rc::new(c)) } fn spot(&self, _id: &str) -> Result<f64, qm::Error> { Ok(self.spot) } fn forward_curve(&self, _instrument: &Instrument, _high_water_mark: Date) -> Result<Rc<Forward>, qm::Error> { Err(qm::Error::new("unsupported")) } fn vol_surface(&self, _instrument: &Instrument, _forward: Rc<Forward>, _high_water_mark: Date) -> Result<Rc<VolSurface>, qm::Error> { Err(qm::Error::new("unsupported")) } fn correlation(&self, _first: &Instrument, _second: &Instrument) -> Result<f64, qm::Error> { Err(qm::Error::new("unsupported")) } } fn sample_pricing_context(spot: f64) -> SamplePricingContext { SamplePricingContext { spot: spot } } #[test] fn test_equity_price_on_spot() { let spot = 123.4; let currency = Rc::new(sample_currency(2)); let equity = sample_equity(currency, 2); let context = sample_pricing_context(spot); let price = equity.price(&context).unwrap(); assert_approx(price, spot); } #[test] fn test_currency_price_on_spot() { let currency = sample_currency(2); let context = sample_pricing_context(123.4); let price = currency.price(&context).unwrap(); assert_approx(price, 1.0); } #[test] fn test_equity_price_mismatching_dates() { let spot = 123.4; let currency = Rc::new(sample_currency(3)); let equity = sample_equity(currency, 3); let context = sample_pricing_context(spot); let price = equity.price(&context).unwrap(); let df = 0.9997867155076675; assert_approx(price, spot * df); } #[test] fn test_currency_price_mismatching_dates() { let currency = sample_currency(3); let context = sample_pricing_context(123.4); let price = currency.price(&context).unwrap(); let df = 0.9997867155076675; assert_approx(price, df); } fn assert_approx(value: f64, expected: f64) { assert!(approx_eq(value, expected, 1e-12), "value={} expected={}", value, expected); } }
true
dafd31f7983e92f20ac95701dbcded2f7b3ed534
Rust
LycrusHamster/muta
/binding-macro/src/lib.rs
UTF-8
8,641
2.859375
3
[ "MIT" ]
permissive
extern crate proc_macro; mod common; mod cycles; mod hooks; mod read_write; mod service; use proc_macro::TokenStream; use crate::cycles::gen_cycles_code; use crate::hooks::verify_hook; use crate::read_write::verify_read_or_write; use crate::service::gen_service_code; #[rustfmt::skip] /// `#[genesis]` marks a service method to generate genesis states when fire up the chain /// /// Method input params should be `(&mut self)` or `(&mut self, payload: PayloadType)` /// /// # Example: /// /// ```rust /// struct Service; /// #[service] /// impl Service { /// #[genesis] /// fn init_genesis( /// &mut self, /// ) { /// do_work(); /// } /// } /// ``` /// /// Or /// /// ```rust /// struct Service; /// #[service] /// impl Service { /// #[genesis] /// fn init_genesis( /// &mut self, /// payload: PayloadType, /// ) { /// do_work(payload); /// } /// } /// ``` #[proc_macro_attribute] pub fn genesis(_: TokenStream, item: TokenStream) -> TokenStream { item } #[proc_macro_attribute] pub fn tx_hook_before(_: TokenStream, item: TokenStream) -> TokenStream { item } #[proc_macro_attribute] pub fn tx_hook_after(_: TokenStream, item: TokenStream) -> TokenStream { item } #[rustfmt::skip] /// `#[read]` marks a service method as readable. /// /// Methods marked with this macro will have: /// Methods with this macro allow access (readable) from outside (RPC or other services). /// /// - Verification /// 1. Is it a struct method marked with #[service]? /// 2. Is visibility private? /// 3. Parameter signature contains `&self and ctx: ServiceContext`? /// 4. Is the return value `ServiceResponse<T>`? /// /// # Example: /// /// ```rust /// struct Service; /// #[service] /// impl Service { /// #[read] /// fn test_read_fn( /// &self, /// _ctx: ServiceContext, /// ) -> ServiceResponse<String> { /// ServiceResponse::<String>::from_succeed("ok".to_owned()) /// } /// } /// ``` #[proc_macro_attribute] pub fn read(_: TokenStream, item: TokenStream) -> TokenStream { verify_read_or_write(item, false) } #[rustfmt::skip] /// `#[write]` marks a service method as writable. /// /// Methods marked with this macro will have: /// - Accessibility /// Methods with this macro allow access (writeable) from outside (RPC or other services). /// /// - Verification /// 1. Is it a struct method marked with #[service]? /// 2. Is visibility private? /// 3. Parameter signature contains `&self and ctx: ServiceContext`? /// 4. Is the return value `ServiceResponse<T>`? /// /// # Example: /// /// ```rust /// struct Service; /// #[service] /// impl Service { /// #[write] /// fn test_write_fn( /// &mut self, /// _ctx: ServiceContext, /// ) -> ServiceResponse<String> { /// ServiceResponse::<String>::from_succeed("ok".to_owned()) /// } /// } /// ``` #[proc_macro_attribute] pub fn write(_: TokenStream, item: TokenStream) -> TokenStream { verify_read_or_write(item, true) } #[rustfmt::skip] /// `# [cycles]` mark an `ImplFn` or `fn`, it will automatically generate code /// to complete the cycle deduction, /// /// ```rust /// // Source Code /// impl Tests { /// #[cycles(100)] /// fn test_cycles(&self, ctx: ServiceContext) -> ServiceResponse<()> { /// ServiceResponse::<()>::from_succeed(()) /// } /// } /// /// // Generated code. 
/// impl Tests { /// fn test_cycles(&self, ctx: ServiceContext) -> ServiceResponse<()> { /// ctx.sub_cycles(100); /// ServiceResponse::<()>::from_succeed(()) /// } /// } /// ``` #[proc_macro_attribute] pub fn cycles(attr: TokenStream, item: TokenStream) -> TokenStream { gen_cycles_code(attr, item) } /// Marks a method so that it executes after the entire block executes. // TODO(@yejiayu): Verify the function signature. #[proc_macro_attribute] pub fn hook_after(_: TokenStream, item: TokenStream) -> TokenStream { verify_hook(item) } /// Marks a method so that it executes before the entire block executes. // TODO(@yejiayu): Verify the function signature. #[proc_macro_attribute] pub fn hook_before(_: TokenStream, item: TokenStream) -> TokenStream { verify_hook(item) } #[rustfmt::skip] /// Marking a ImplItem for service, it will automatically trait /// `protocol::traits::Service`. /// /// # Example /// /// use serde::{Deserialize, Serialize}; /// use protocol::traits::ServiceSDK; /// use protocol::types::ServiceContext; /// use protocol::ProtocolResult; /// /// ```rust /// // Source code /// /// // serde::Deserialize and serde::Serialize are required. /// #[derive(Serialize, Deserialize)] /// struct CreateKittyPayload { /// // fields /// } /// /// // serde::Deserialize and serde::Serialize are required. /// #[derive(Serialize, Deserialize)] /// struct GetKittyPayload<SDK: ServiceSDK> { /// // fields /// } /// /// #[service] /// impl<SDK: ServiceSDK> KittyService<SDK> { /// #[hook_before] /// fn custom_hook_before(&mut self) { /// // Do something /// } /// /// #[hook_after] /// fn custom_hook_after(&mut self) { /// // Do something /// } /// /// #[read] /// fn get_kitty( /// &self, /// ctx: ServiceContext, /// payload: GetKittyPayload, /// ) -> ServiceResponse<String> { /// // Do something /// } /// /// #[write] /// fn create_kitty( /// &mut self, /// ctx: ServiceContext, /// payload: CreateKittyPayload, /// ) -> ServiceResponse<String> { /// // Do something /// } /// } /// /// // Generated code. 
/// impl<SDK: ServiceSDK> Service<SDK> for KittyService<SDK> { /// fn hook_before_(&mut self) { /// self.custom_hook_before() /// } /// /// fn hook_after(&mut self) { /// self.custom_hook_after() /// } /// /// fn write(&mut self, ctx: ServiceContext) -> ServiceResponse<String> { /// let method = ctx.get_service_method(); /// /// match ctx.get_service_method() { /// "create_kitty" => { /// let payload_res: Result<CreateKittyPayload, _> = serde_json::from_str(ctx.get_payload()); /// if payload_res.is_error() { /// return ServiceResponse::<String>::from_error(1, "service macro decode payload failed".to_owned()); /// }; /// let payload = payload_res.unwrap(); /// let res = self.#list_read_ident(ctx, payload); /// if !res.is_error() { /// let mut data_json = serde_json::to_string(&res.succeed_data).unwrap_or_else(|e| panic!("service macro encode payload failed: {:?}", e)); /// if data_json == "null" { /// data_json = "".to_owned(); /// } /// ServiceResponse::<String>::from_succeed(data_json) /// } else { /// ServiceResponse::<String>::from_error(res.code, res.error_message.clone()) /// } /// _ => panic!("service macro not found method:{:?} of service:{:?}", method, service), /// } /// } /// /// fn read(&self, ctx: ServiceContext) -> ProtocolResult<&str> { /// let method = ctx.get_service_method(); /// /// match ctx.get_service_method() { /// "get_kitty" => { /// let payload_res: Result<GetKittyPayload, _> = serde_json::from_str(ctx.get_payload()); /// if payload_res.is_error() { /// return ServiceResponse::<String>::from_error(1, "service macro decode payload failed".to_owned()); /// }; /// let payload = payload_res.unwrap(); /// let res = self.#list_read_ident(ctx, payload); /// if !res.is_error() { /// let mut data_json = serde_json::to_string(&res.succeed_data).unwrap_or_else(|e| panic!("service macro encode payload failed: {:?}", e)); /// if data_json == "null" { /// data_json = "".to_owned(); /// } /// ServiceResponse::<String>::from_succeed(data_json) /// } else { /// ServiceResponse::<String>::from_error(res.code, res.error_message.clone()) /// } /// _ => panic!("service macro not found method:{:?} of service:{:?}", method, service), /// } /// } /// } /// ``` #[proc_macro_attribute] pub fn service(attr: TokenStream, item: TokenStream) -> TokenStream { gen_service_code(attr, item) }
true
2ee5649a692960758e26f1dea74f5e37c2c65653
Rust
semargal/num-to-words
/src/utils.rs
UTF-8
218
3.140625
3
[ "Apache-2.0" ]
permissive
use crate::types::*; pub fn int_to_triplets(mut number: Int) -> Vec<Int> { let mut triplets = Vec::new(); while number > 0 { triplets.push(number % 1000); number /= 1000; } triplets }
true
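A quick standalone check of the triplet decomposition in utils.rs above. This is only a sketch, not part of the num-to-words crate: the helper name and the use of u64 in place of the crate's Int alias are assumptions made here for illustration.

// Standalone sketch of the same least-significant-first triplet split.
// `u64` stands in for the crate's `Int` alias (an assumption here).
fn split_into_triplets(mut number: u64) -> Vec<u64> {
    let mut triplets = Vec::new();
    while number > 0 {
        triplets.push(number % 1000); // lowest three decimal digits first
        number /= 1000;
    }
    triplets
}

fn main() {
    // 1_234_567 splits into [567, 234, 1]; the spoken form is read from the
    // highest triplet down, so callers typically iterate the result in reverse.
    assert_eq!(split_into_triplets(1_234_567), vec![567, 234, 1]);
    assert_eq!(split_into_triplets(0), Vec::<u64>::new()); // zero yields no triplets
    println!("triplet split behaves as expected");
}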
7cace3920061be374e669ae33027e29958876737
Rust
feldim2425/AdventOfCode19
/day_09/main.rs
UTF-8
527
2.578125
3
[]
no_license
#[path = "../common/title.rs"] mod title; mod intcomputer; use std::fs; use intcomputer::*; fn solve_puzzle(mem: &Vec<i64>) { let result = run_program(mem, &vec![1]); println!("1.) {}", result.get_result().unwrap().outputs[0]); let result_2 = run_program(mem, &vec![2]); println!("2.) {}", result_2.get_result().unwrap().outputs[0]); } fn main(){ title::print_title(9, "Sensor Boost"); let mem = split_string(fs::read_to_string("day_09/program.txt").expect("File error!")); solve_puzzle(&mem); }
true
a9fa4e18755fb07cc3617570576d21091e79e7f0
Rust
automerge/automerge
/rust/automerge/benches/map.rs
UTF-8
8,485
2.5625
3
[ "MIT" ]
permissive
use automerge::{transaction::Transactable, Automerge, ScalarValue, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; fn repeated_increment(n: u64) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); tx.put(ROOT, "counter", ScalarValue::counter(0)).unwrap(); for _ in 0..n { tx.increment(ROOT, "counter", 1).unwrap(); } tx.commit(); doc } fn repeated_put(n: u64) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); for i in 0..n { tx.put(ROOT, "0", i).unwrap(); } tx.commit(); doc } fn increasing_put(n: u64) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); for i in 0..n { tx.put(ROOT, i.to_string(), i).unwrap(); } tx.commit(); doc } fn decreasing_put(n: u64) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); for i in (0..n).rev() { tx.put(ROOT, i.to_string(), i).unwrap(); } tx.commit(); doc } fn criterion_benchmark(c: &mut Criterion) { let sizes = [100, 1_000, 10_000]; let mut group = c.benchmark_group("map"); for size in &sizes { group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { b.iter(|| repeated_put(size)) }); group.bench_with_input( BenchmarkId::new("repeated increment", size), size, |b, &size| b.iter(|| repeated_increment(size)), ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("increasing put", size), size, |b, &size| b.iter(|| increasing_put(size)), ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("decreasing put", size), size, |b, &size| b.iter(|| decreasing_put(size)), ); } group.finish(); let mut group = c.benchmark_group("map save"); for size in &sizes { group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { b.iter_batched( || repeated_put(size), |doc| doc.save(), criterion::BatchSize::LargeInput, ) }); group.bench_with_input( BenchmarkId::new("repeated increment", size), size, |b, &size| { b.iter_batched( || repeated_increment(size), |doc| doc.save(), criterion::BatchSize::LargeInput, ) }, ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("increasing put", size), size, |b, &size| { b.iter_batched( || increasing_put(size), |doc| doc.save(), criterion::BatchSize::LargeInput, ) }, ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("decreasing put", size), size, |b, &size| { b.iter_batched( || decreasing_put(size), |doc| doc.save(), criterion::BatchSize::LargeInput, ) }, ); } group.finish(); let mut group = c.benchmark_group("map load"); for size in &sizes { group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { b.iter_batched( || repeated_put(size).save(), |bytes| Automerge::load(&bytes).unwrap(), criterion::BatchSize::LargeInput, ) }); group.bench_with_input( BenchmarkId::new("repeated increment", size), size, |b, &size| { b.iter_batched( || repeated_increment(size).save(), |bytes| Automerge::load(&bytes).unwrap(), criterion::BatchSize::LargeInput, ) }, ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("increasing put", size), size, |b, &size| { b.iter_batched( || increasing_put(size).save(), |bytes| Automerge::load(&bytes).unwrap(), 
criterion::BatchSize::LargeInput, ) }, ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("decreasing put", size), size, |b, &size| { b.iter_batched( || decreasing_put(size).save(), |bytes| Automerge::load(&bytes).unwrap(), criterion::BatchSize::LargeInput, ) }, ); } group.finish(); let mut group = c.benchmark_group("map apply"); for size in &sizes { group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { b.iter_batched( || { repeated_put(size) .get_changes(&[]) .into_iter() .cloned() .collect::<Vec<_>>() }, |changes| { let mut doc = Automerge::new(); doc.apply_changes(changes) }, criterion::BatchSize::LargeInput, ) }); group.bench_with_input( BenchmarkId::new("repeated increment", size), size, |b, &size| { b.iter_batched( || { repeated_increment(size) .get_changes(&[]) .into_iter() .cloned() .collect::<Vec<_>>() }, |changes| { let mut doc = Automerge::new(); doc.apply_changes(changes) }, criterion::BatchSize::LargeInput, ) }, ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("increasing put", size), size, |b, &size| { b.iter_batched( || { increasing_put(size) .get_changes(&[]) .into_iter() .cloned() .collect::<Vec<_>>() }, |changes| { let mut doc = Automerge::new(); doc.apply_changes(changes) }, criterion::BatchSize::LargeInput, ) }, ); group.throughput(criterion::Throughput::Elements(*size)); group.bench_with_input( BenchmarkId::new("decreasing put", size), size, |b, &size| { b.iter_batched( || { decreasing_put(size) .get_changes(&[]) .into_iter() .cloned() .collect::<Vec<_>>() }, |changes| { let mut doc = Automerge::new(); doc.apply_changes(changes) }, criterion::BatchSize::LargeInput, ) }, ); } group.finish(); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches);
true
47a6454a7c768353f8f37e6e6a021f2291efe339
Rust
luryus/cursive
/cursive-core/src/views/focus_tracker.rs
UTF-8
1,718
3.140625
3
[ "MIT" ]
permissive
use crate::{
    direction::Direction,
    event::{Event, EventResult},
    view::{CannotFocus, View, ViewWrapper},
    With,
};

/// Detects focus events for a view.
pub struct FocusTracker<T> {
    view: T,
    on_focus_lost: Box<dyn FnMut(&mut T) -> EventResult>,
    on_focus: Box<dyn FnMut(&mut T) -> EventResult>,
}

impl<T> FocusTracker<T> {
    /// Wraps a view in a new `FocusTracker`.
    pub fn new(view: T) -> Self {
        FocusTracker {
            view,
            on_focus_lost: Box::new(|_| EventResult::Ignored),
            on_focus: Box::new(|_| EventResult::Ignored),
        }
    }

    /// Sets a callback to be run when the focus is gained.
    pub fn on_focus<F>(self, f: F) -> Self
    where
        F: 'static + FnMut(&mut T) -> EventResult,
    {
        self.with(|s| s.on_focus = Box::new(f))
    }

    /// Sets a callback to be run when the focus is lost.
    pub fn on_focus_lost<F>(self, f: F) -> Self
    where
        F: 'static + FnMut(&mut T) -> EventResult,
    {
        self.with(|s| s.on_focus_lost = Box::new(f))
    }
}

impl<T: View> ViewWrapper for FocusTracker<T> {
    wrap_impl!(self.view: T);

    fn wrap_take_focus(
        &mut self,
        source: Direction,
    ) -> Result<EventResult, CannotFocus> {
        match self.view.take_focus(source) {
            Ok(res) => Ok(res.and((self.on_focus)(&mut self.view))),
            Err(CannotFocus) => Err(CannotFocus),
        }
    }

    fn wrap_on_event(&mut self, event: Event) -> EventResult {
        let res = if let Event::FocusLost = event {
            (self.on_focus_lost)(&mut self.view)
        } else {
            EventResult::Ignored
        };
        res.and(self.view.on_event(event))
    }
}
true
1e5fbe71ecf187abca4242e223dce834cf43858b
Rust
kevincox/rustymedia
/src/cache.rs
UTF-8
1,597
2.703125
3
[ "Apache-2.0" ]
permissive
use lru_cache; use smallvec; use std; #[derive(Debug)] struct Entry { format: crate::ffmpeg::Format, media: std::sync::Arc<dyn crate::Media>, } #[derive(Debug)] pub struct TranscodeCache { values: lru_cache::LruCache< String, smallvec::SmallVec<[Entry; 1]>>, } impl TranscodeCache { pub fn new() -> Self { TranscodeCache { values: lru_cache::LruCache::new(10), } } pub fn get(&mut self, exec: &crate::Executors, item: &Box<dyn crate::Object>, format: &crate::ffmpeg::Format, device: &crate::ffmpeg::Device, ) -> crate::Result<std::sync::Arc<dyn crate::Media>> { if format.compatible_with(device) { return item.body(&exec) } eprintln!("Cache size: {}", self.values.len()); match self.values.entry(item.id().to_owned()) { lru_cache::Entry::Occupied(mut e) => { for e in e.get_mut().iter_mut() { eprintln!("Transcode available: {:?}", e.format); if e.format.compatible_with(device) { eprintln!("Transcode cache hit!"); return Ok(e.media.clone()) } } let transcoded_format = format.transcode_for(device); let media = item.transcoded_body(&exec, &format, &transcoded_format)?; e.get_mut().push(Entry{format: transcoded_format, media: media.clone()}); Ok(media) } lru_cache::Entry::Vacant(e) => { eprintln!("Transcode cache miss!"); let transcoded_format = format.transcode_for(device); let media = item.transcoded_body(exec, &format, &transcoded_format)?; e.insert(smallvec::SmallVec::from_buf( [Entry{format: transcoded_format, media: media.clone()}])); Ok(media) } } } }
true
a22de3592e800f0286278dc50facbf5b8f21a6d9
Rust
AIRTucha/TinyRenderer.rs
/src/engine.rs
UTF-8
2,982
2.890625
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::common::Vec3; use crate::matrix::Matrix4x4; use std::f64; use std::vec; use std::vec::Vec; use wasm_bindgen::Clamped; use wasm_bindgen::JsCast; use web_sys::ImageData; pub struct Engine { width: u32, height: u32, context: web_sys::CanvasRenderingContext2d, } impl Engine { pub fn render(&self, img: &mut Scene) { self.context.put_image_data(&img.image(), 0.0, 0.0); } pub fn new(id: &str) -> Engine { let document = web_sys::window().unwrap().document().unwrap(); let canvas: web_sys::HtmlCanvasElement = document .get_element_by_id(id) .unwrap() .dyn_into::<web_sys::HtmlCanvasElement>() .map_err(|_| ()) .unwrap(); let context = canvas .get_context("2d") .unwrap() .unwrap() .dyn_into::<web_sys::CanvasRenderingContext2d>() .unwrap(); Engine { width: canvas.width(), height: canvas.height(), context: context, } } pub fn create_scene(&self) -> Scene { Scene::new(self.width as usize, self.height as usize) } } pub struct Scene { width: usize, height: usize, data_size: usize, image: Vec<u8>, z_buffer: Vec<Vec<f64>>, matrix: Matrix4x4, } impl Scene { pub fn dot(&mut self, x: usize, y: usize, z: f64, r: u8, g: u8, b: u8, a: u8) { if self.z_buffer[x][y] < z { let red_index = (self.width * y + x) * 4; self.image[red_index] = r; self.image[red_index + 1] = g; self.image[red_index + 2] = b; self.image[red_index + 3] = a; self.z_buffer[x][y] = z; } } pub fn image(&mut self) -> ImageData { ImageData::new_with_u8_clamped_array_and_sh( Clamped(&mut self.image), self.width as u32, self.height as u32, ) .unwrap() } pub fn clear(&mut self) { let mut i = 0; while i < self.data_size { self.image[i] = 0; self.image[i + 1] = 0; self.image[i + 2] = 0; self.image[i + 3] = 255; i += 4; } } pub fn scale(&self, vec: &Vec3) -> Vec3 { (*vec) * &self.matrix } pub fn new(width: usize, height: usize) -> Scene { let pixel_count = width * height; let data_size = pixel_count * 4; let widthf = width as f64; let heightf = height as f64; Scene { width: width, height: height, data_size: data_size, image: vec![0; data_size], z_buffer: vec![vec![-100.0; height]; width], matrix: Matrix4x4::new( [widthf / 2.0, 0.0, 0.0, widthf / 2.0], [0.0, -heightf / 2.0, 0.0, heightf / 2.0], [0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 0.0, 0.0], ), } } }
true
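Scene::dot above addresses the RGBA byte buffer with the usual row-major formula red_index = (width * y + x) * 4. A small standalone check of that index arithmetic follows; the helper is illustrative only and needs no canvas, wasm-bindgen, or web-sys.

// Row-major RGBA indexing: each pixel occupies 4 consecutive bytes.
fn red_index(width: usize, x: usize, y: usize) -> usize {
    (width * y + x) * 4
}

fn main() {
    let (width, height) = (320usize, 240usize);
    // Pixel (2, 1) on a 320-wide image starts at byte (320*1 + 2) * 4 = 1288.
    assert_eq!(red_index(width, 2, 1), 1288);
    // The last pixel's alpha byte is the final byte of the buffer.
    let last_alpha = red_index(width, width - 1, height - 1) + 3;
    assert_eq!(last_alpha, width * height * 4 - 1);
    println!("RGBA index math checks out");
}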
8a50277e12d27526220acabc8342583825a22eee
Rust
bwindsor22/thistle
/src/database/cosine_db.rs
UTF-8
1,487
3.265625
3
[]
no_license
use crate::database::embedding::get_embedding;
use crate::database::db::{Operations, Doc};

pub struct CosineDB {
    pub docs: Vec<Doc>,
}

impl Operations for CosineDB {
    fn load(&mut self, texts: Vec<String>) {
        for text in texts {
            let vect = get_embedding(&text);
            self.docs.push(Doc {
                text: text,
                embedding: vect,
                score: 0.0,
            });
        }
    }

    fn query(&self, query: String, n: u32) -> Vec<Doc> {
        let mut result = Vec::new();
        let query_embedding = get_embedding(&query);
        for doc in self.docs.iter() {
            let score = cosine(&doc.embedding, &query_embedding);
            result.push(Doc {
                text: doc.text.clone(),
                embedding: doc.embedding.clone(),
                score: score,
            });
        }
        result.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap());
        result.drain(..n as usize).collect()
    }
}

fn cosine(vec1: &Vec<f64>, vec2: &Vec<f64>) -> f64 {
    let norms = norm(vec1) * norm(vec2);
    if norms > 0. {
        let res = dot(vec1, vec2) / (norm(vec1) * norm(vec2));
        // For dev purposes
        println!("Cosine {}", res);
        return res;
    }
    return 0.;
}

fn dot(vec1: &Vec<f64>, vec2: &Vec<f64>) -> f64 {
    vec1.iter()
        .zip(vec2.iter())
        .fold(0.0, |sum, (&v1, &v2)| sum + (v1 * v2))
}

fn norm(a: &Vec<f64>) -> f64 {
    dot(a, a).sqrt()
}
true
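The scoring in cosine_db.rs boils down to dot(a, b) / (norm(a) * norm(b)). The following self-contained sketch reuses the same function names but nothing else from the crate (no embedding model), and works the arithmetic on two tiny vectors; it also reuses the precomputed norms rather than recomputing them, a small deviation from the file above.

// Minimal cosine-similarity walkthrough mirroring dot()/norm()/cosine() above.
fn dot(a: &[f64], b: &[f64]) -> f64 {
    a.iter().zip(b).map(|(x, y)| x * y).sum()
}

fn norm(a: &[f64]) -> f64 {
    dot(a, a).sqrt()
}

fn cosine(a: &[f64], b: &[f64]) -> f64 {
    let norms = norm(a) * norm(b);
    if norms > 0.0 { dot(a, b) / norms } else { 0.0 }
}

fn main() {
    // Orthogonal vectors score 0, identical directions score 1.
    assert!((cosine(&[1.0, 0.0], &[0.0, 1.0]) - 0.0).abs() < 1e-12);
    assert!((cosine(&[2.0, 0.0], &[5.0, 0.0]) - 1.0).abs() < 1e-12);
    // A 45-degree pair scores 1/sqrt(2), roughly 0.7071.
    let score = cosine(&[1.0, 0.0], &[1.0, 1.0]);
    assert!((score - 1.0 / 2.0_f64.sqrt()).abs() < 1e-12);
    println!("cosine(45 degrees) = {:.4}", score);
}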
b82e84d842b6da507b489b5429124d4fdf57ec46
Rust
mbStavola/Voltaire
/src/action/scan.rs
UTF-8
3,280
2.796875
3
[]
no_license
use std::fs; use std::path::{Path, PathBuf}; pub fn execute(volatility_path: PathBuf, args: &super::ArgMatches) { let source = Path::new(args.value_of("source").unwrap()); let destination = Path::new(args.value_of("destination").unwrap()); let es = args.value_of("es").unwrap(); let profile = args.value_of("profile").unwrap(); if !source.exists() { panic!("Input file not found"); } if !destination.exists() { println!("Creating {}", destination.to_string_lossy()); fs::create_dir_all(&destination).unwrap(); } // Run through and execute each of these volatility commands let tests = vec!["pslist", "pstree", "netscan", "psxview", "consoles", "psscan", "mutantscan -s", "cmdscan", "dlllist", "filescan", "iehistory", "svcscan", "modules", "modscan", "sessions", "messagehooks", "windows", "wintree", "clipboard", "deskscan"]; for test in tests { println!("Starting {}", test); let outfile = format!("{}/ES{}_{}.txt", destination.to_str().unwrap(), es, test); let result = super::Command::new(&volatility_path) .arg("-f") .arg(&source) .arg(format!("--profile={}", &profile)) .arg(test) .arg(format!("--output-file={}", &outfile)) .output(); if let Ok(output) = result { if output.status.success() { println!("{}", String::from_utf8(output.stdout).unwrap()); println!("Successful execution of {}.", test) } else { println!("{}", String::from_utf8(output.stderr).unwrap()); println!("Failure executing {}. Exiting.", test); } } else { println!("Failure executing {}. Exiting.", test); } } // If we're running Voltaire on Windows, we can execute another test // ACTUALLY we need to check if it's a Windows profile if cfg!(target_os = "windows") { let outfile = format!("{}ES{}_autorun.txt", &destination.to_str().unwrap(), es); let result = super::Command::new(volatility_path) .arg("-f") .arg(source) .arg(format!("--profile={}", profile)) .arg("printkey") .arg(r#""Software\Microsoft\Windows\CurrentVersion\Run\""#) .arg(format!("--output-file={}", outfile)) .output(); if let Ok(output) = result { if output.status.success() { println!("{}", String::from_utf8(output.stdout).unwrap()); println!("Successful execution of autorun"); } else { println!("{}", String::from_utf8(output.stderr).unwrap()); println!("Failure executing autorun. Exiting."); } } else { println!("Failure executing autorun. Exiting."); } } }
true
e0690cadd7085c2832072b868681f0ff6753b509
Rust
micahhausler/jwtdecode-rust
/src/jwt.rs
UTF-8
1,725
3.484375
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use base64::decode;
use serde::{Deserialize, Serialize};
use serde_json;
use std::collections::HashMap;

/// JWT represents a JSON web token
#[derive(Serialize, Deserialize, Debug)]
pub struct JWT {
    pub header: HashMap<String, String>,
    pub body: HashMap<String, serde_json::Value>,
    pub signature: String,
    token: String,
}

pub type JWTError = Box<dyn std::error::Error>;
pub type JWTResult = Result<JWT, JWTError>;

impl JWT {
    /// Return a new JWT for a given token string
    ///
    /// ```
    /// let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJsc2t5d2Fsa2VyIiwiaWF0IjoyMzMzNjY0MDB9.k-tTF2CIZ-vu6-syRnCw3Zlc4jwfBCXAQRAyk0mtmso";
    /// let result = jwtdecode::jwt::JWT::new(token.to_string()).unwrap();
    /// assert_eq!(result.body.get("sub").unwrap(), "lskywalker");
    /// ```
    pub fn new(input_string: String) -> JWTResult {
        let parts: Vec<&str> = input_string.splitn(3, '.').collect::<Vec<&str>>();
        if parts.len() != 3 {
            return Err(JWTError::from("Not enough parts for a valid jwt"));
        }
        let decoded_header = decode(parts[0])?;
        let header = serde_json::from_slice(&decoded_header)?;
        let decoded_body = decode(parts[1])?;
        let body = serde_json::from_slice(&decoded_body)?;
        Ok(JWT {
            header: header,
            body: body,
            signature: String::from(parts[2]),
            token: input_string,
        })
    }
}

#[cfg(test)]
mod tests {
    use crate::jwt::JWT;

    #[test]
    fn jwt_new_valid() {
        let valid_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c";
        let result = JWT::new(valid_token.to_string()).unwrap();
        assert_eq!(result.header.get("typ").unwrap(), "JWT");
        assert_eq!(result.header.get("alg").unwrap(), "HS256");
        assert_eq!(result.body.get("sub").unwrap(), "1234567890");
    }
}
true
0513a07d8ac8c393daee13fe358145d87cfd09fc
Rust
Pagliacii/sicp-reg-machine
/examples/ec_evaluator/supports/primitive.rs
UTF-8
3,625
3.046875
3
[ "MIT" ]
permissive
use reg_machine::{ machine::{ procedure::Procedure, value::{ToValue, Value}, }, make_proc, math, }; use super::{ io::display, list::{is_null_pair, list_ref, list_rest}, }; pub fn apply_primitive_procedure(proc: Vec<Value>, args: Vec<Value>) -> Value { let pair = &proc; if pair.len() < 2 || Value::new("primitive") != pair[0] { panic!( "Unable to apply this `proc` argument: {}.", Value::new(proc) ); } if !pair[1].is_procedure() { panic!("The `{}` isn't a primitive procedure.", pair[1]); } pair[1].perform(args).unwrap() } pub fn primitive_procedures() -> Vec<Procedure> { let mut procedures: Vec<Procedure> = vec![]; procedures.push(make_proc!("car", 1, |list: Value| list_ref(&list, 0))); procedures.push(make_proc!("cdr", 1, |list: Value| list_rest(&list, 1))); procedures.push(make_proc!("cons", 2, |head: Value, tail: Value| { let mut tail = tail.clone(); if let Value::List(l) = &mut tail { l.insert(0, head); tail } else { vec![head, tail, Value::Nil].to_value() } })); procedures.push(make_proc!("null?", 1, |pair: Value| is_null_pair(&pair))); procedures.push(Procedure::new("+", 0, math::addition)); procedures.push(Procedure::new("-", 1, math::subtraction)); procedures.push(Procedure::new("*", 0, math::multiplication)); procedures.push(Procedure::new("/", 1, math::division)); procedures.push(Procedure::new("=", 0, math::equal)); procedures.push(Procedure::new("<", 0, math::less_than)); procedures.push(Procedure::new(">", 0, math::greater_than)); procedures.push(Procedure::new("<=", 0, math::less_than_or_equal_to)); procedures.push(Procedure::new(">=", 0, math::greater_than_or_equal_to)); procedures.push(make_proc!("exit", |_| std::process::exit(0))); procedures.push(make_proc!("display", 1, |v: Value| display(&v))); procedures.push(make_proc!("newline", |_| println!())); // Support logical composition operations: `and`, `or` and `not`. procedures.push(Procedure::new("and", 0, |args| { for value in args.iter() { if value.is_false() { return false.to_value(); } } args.last().map_or_else(|| true.to_value(), |v| v.clone()) })); procedures.push(Procedure::new("or", 0, |args| { for value in args.iter() { if !value.is_bool() { return value.clone(); } if value.is_true() { return true.to_value(); } } false.to_value() })); procedures.push(Procedure::new("not", 1, |args| { if args.len() > 1 { panic!("The procedure #[not] has been called with {} arguments; it requires exactly 1 argument.", args.len()); } args[0].is_false() })); procedures.push(Procedure::new("list", 0, |args| args.to_value())); procedures } #[cfg(test)] mod primitive_tests { use super::super::environment::{get_global_environment, manipulate_env}; use super::*; use reg_machine::machine::value::TryFromValue; #[test] fn test_apply_primitive_procedure() { let env = usize::try_from(get_global_environment()).unwrap(); let proc = manipulate_env("lookup", env, &vec![Value::new("+")]); let res = apply_primitive_procedure( Vec::<Value>::try_from(proc).unwrap(), Value::new(vec![Value::new(1), Value::new(1)]), ); assert_eq!(Value::Num(2.0), res); } }
true
89c8a426522045e23bdad0878d5c09b425882af0
Rust
grumpyjames/aoc-2018
/src/nine.rs
UTF-8
2,311
3.15625
3
[]
no_license
extern crate regex; #[derive(Debug)] struct Player { score: usize } struct Entry { value: usize, prev: usize, next: usize, } fn main() { let player_count = 448; let marble_count= 7162800; let mut players = Vec::new(); for _i in 0..player_count { players.push(Player {score: 0}); } let mut storage : Vec<Entry> = Vec::with_capacity(marble_count); storage.push(Entry { value: 0, prev: 0, next: 0 }); let mut current_entry_index : usize = 0; for j in 1..marble_count { let mut current_player = players.get_mut(j % player_count).unwrap(); if j % 23 == 0 { current_player.score += j; let mut skip_count = 0; while skip_count < 7 { current_entry_index = storage.get_mut(current_entry_index).unwrap().prev; skip_count += 1; } let mut previous; let mut next; { let mut cur = storage.get_mut(current_entry_index).unwrap(); previous = cur.prev; next = cur.next; current_player.score += cur.value; } storage.get_mut(previous).unwrap().next = next; storage.get_mut(next).unwrap().prev = previous; current_entry_index = next; } else { let mut skip_count = 0; while skip_count < 2 { current_entry_index = storage.get_mut(current_entry_index).unwrap().next; skip_count += 1; } let new_index = storage.len(); let mut new_entry_previous; { let mut entry_after_new = storage.get_mut(current_entry_index).unwrap(); new_entry_previous = entry_after_new.prev; entry_after_new.prev = new_index; } storage.push( Entry { value: j, prev: new_entry_previous, next: current_entry_index }); storage.get_mut(new_entry_previous).unwrap().next = new_index; current_entry_index = new_index; } } players.sort_by(|p1, p2| p2.score.cmp(&p1.score)); println!("{:?}", players); }
true
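The marble circle in nine.rs above is an arena-style doubly linked list: nodes live in a Vec and refer to one another by index, so insertion and removal are O(1) with no per-node allocation and no fight with the borrow checker over pointers. A minimal standalone sketch of that insert/remove idiom, independent of the puzzle input:

// Arena-style circular doubly linked list: nodes link by Vec index.
struct Node {
    value: usize,
    prev: usize,
    next: usize,
}

fn main() {
    // Start with a single node that points at itself.
    let mut arena = vec![Node { value: 0, prev: 0, next: 0 }];
    let mut current = 0;

    // Insert 1..=4, each immediately after the current node.
    for value in 1..=4_usize {
        let new_index = arena.len();
        let after = arena[current].next;
        arena.push(Node { value, prev: current, next: after });
        arena[current].next = new_index;
        arena[after].prev = new_index;
        current = new_index;
    }

    // Unlink the node before `current` (value 3) without shifting the Vec.
    let victim = arena[current].prev;
    let (p, n) = (arena[victim].prev, arena[victim].next);
    arena[p].next = n;
    arena[n].prev = p;

    // Walk the circle once starting from node 0: expect 0 1 2 4.
    let mut order = vec![];
    let mut i = 0;
    loop {
        order.push(arena[i].value);
        i = arena[i].next;
        if i == 0 {
            break;
        }
    }
    assert_eq!(order, vec![0, 1, 2, 4]);
    println!("circle after removal: {:?}", order);
}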
16113fe8bfb7ddcab277e35006f128ff41ff871a
Rust
gnzlbg/is_sorted
/src/unsigned.rs
UTF-8
11,189
2.921875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Algorithms for unsigned integers #![allow(unused_attributes)] /// 128-bit-wide algorithm for slices of unsigned integers /// /// Note: /// * `_mm_load_si128` requires `SSE2` /// * `_mm_alignr_epi8` requires `SSSE3` /// * `_mm_and_si128` requires `SSE2` /// * `_mm_test_all_ones` requires `SSE4.1` macro_rules! unsigned_128 { ($name:ident, $cpuid:tt, $id:ident, $nlanes:expr, $cmpeq:ident, $minu:ident, $head:ident, $tail:ident) => { #[inline] #[target_feature(enable = $cpuid)] pub unsafe fn $name(s: &[$id]) -> usize { #[cfg(target_arch = "x86")] use arch::x86::*; #[cfg(target_arch = "x86_64")] use arch::x86_64::*; // The alignment requirements for 128-bit wide vectors is 16 bytes: const ALIGNMENT: usize = 16; let mut i: usize = $head!(s, $id, ALIGNMENT); // ^^^^^^ i is the index of the first element aligned to an // ALIGNMENT boundary let n = s.len(); let ap = |o| (s.as_ptr().offset(o as isize)) as *const __m128i; // Unroll factor: #of 128-bit vectors processed per loop iteration const NVECS: usize = 4; // #lanes in each 128-bit vector const NLANES: usize = $nlanes; // Stride: number of elements processed in each loop iteration // unroll_factor * #lane per vector const STRIDE: usize = NLANES * NVECS; // Minimum number of elements required for explicit vectorization. // Since we need one extra vector to get the last element, // this is #lanes * (unroll + 1) == stride + #lanes const MIN_LEN: usize = NLANES * (NVECS + 1); // Width of the vector lanes in bytes const EWIDTH: i32 = 128 / 8 / NLANES as i32; if (n - i) >= MIN_LEN { let mut current = _mm_load_si128(ap(i + 0 * NLANES)); // [a0,..,a3] while i < n - STRIDE { let next0 = _mm_load_si128(ap(i + 1 * NLANES)); // [a4,..,a7] let next1 = _mm_load_si128(ap(i + 2 * NLANES)); // [a8,..,a11] let next2 = _mm_load_si128(ap(i + 3 * NLANES)); // [a12,..,a15] let next3 = _mm_load_si128(ap(i + 4 * NLANES)); // [a16,..a19] let compare0 = _mm_alignr_epi8(next0, current, EWIDTH); // [a1,..,a4] let compare1 = _mm_alignr_epi8(next1, next0, EWIDTH); // [a5,..,a8] let compare2 = _mm_alignr_epi8(next2, next1, EWIDTH); // [a9,..,a12] let compare3 = _mm_alignr_epi8(next3, next2, EWIDTH); // [a13,..,a16] // a <= b <=> a == minu(a,b): // [a0 <= a1,..,a3 <= a4] let mask0 = $cmpeq(current, $minu(current, compare0)); // [a4 <= a5,..,a7 <= a8] let mask1 = $cmpeq(next0, $minu(next0, compare1)); // [a8 <= a9,..,a11 <= a12] let mask2 = $cmpeq(next1, $minu(next1, compare2)); // [a12 <= a13,..,a15 <= a16] let mask3 = $cmpeq(next2, $minu(next2, compare3)); // mask = mask0 && mask1 && mask2 && mask3 let mask = _mm_and_si128( _mm_and_si128(mask0, mask1), _mm_and_si128(mask2, mask3), ); // If the resulting mask has all bits set it means that all // the <= comparisons were succesfull: if _mm_test_all_ones(mask) == 0 { return i; } current = next3; i += STRIDE; } } $tail!(s, n, i) } }; } pub mod sse41 { // `_mm_cmpgt_epi32` requires `SSE2` // `_mm_min_epu32` requires `SSE4.1` unsigned_128!( is_sorted_lt_u32, "sse4.1", u32, 4, _mm_cmpeq_epi32, _mm_min_epu32, is_sorted_lt_until_alignment_boundary, is_sorted_lt_tail ); // `_mm_cmpgt_epi16` requires `SSE2` // `_mm_min_epu16` requires `SSE4.1` unsigned_128!( is_sorted_lt_u16, "sse4.1", u16, 8, _mm_cmpeq_epi16, _mm_min_epu16, is_sorted_lt_until_alignment_boundary, is_sorted_lt_tail ); // `_mm_cmpgt_epi8` requires `SSE2` // `_mm_min_epu8` requires `SSE2` unsigned_128!( is_sorted_lt_u8, "sse4.1", u8, 16, _mm_cmpeq_epi8, _mm_min_epu8, is_sorted_lt_until_alignment_boundary, is_sorted_lt_tail ); // `_mm_cmpgt_epi32` requires `SSE2` // 
`_mm_max_epu32` requires `SSE4.1` unsigned_128!( is_sorted_gt_u32, "sse4.1", u32, 4, _mm_cmpeq_epi32, _mm_max_epu32, is_sorted_gt_until_alignment_boundary, is_sorted_gt_tail ); // `_mm_cmpgt_epi16` requires `SSE2` // `_mm_max_epu16` requires `SSE4.1` unsigned_128!( is_sorted_gt_u16, "sse4.1", u16, 8, _mm_cmpeq_epi16, _mm_max_epu16, is_sorted_gt_until_alignment_boundary, is_sorted_gt_tail ); // `_mm_cmpgt_epi8` requires `SSE2` // `_mm_max_epu8` requires `SSE2` unsigned_128!( is_sorted_gt_u8, "sse4.1", u8, 16, _mm_cmpeq_epi8, _mm_max_epu8, is_sorted_gt_until_alignment_boundary, is_sorted_gt_tail ); } /// 256-bit wide algorithm for slices of unsigned integers /// /// Note: /// * `_mm256_load_si256` requires `AVX` /// * `_mm256_loadu_si256` requires `AVX` /// * `_mm256_and_si256` requires `AVX2` /// * `_mm256_testc_si256` requires `AVX` /// * `_mm256_set1_epi64x` requires `AVX` macro_rules! unsigned_256 { ($name:ident, $cpuid:tt, $id:ident, $nlanes:expr, $cmpeq:ident, $minu:ident, $head:ident, $tail:ident) => { #[inline] #[target_feature(enable = $cpuid)] pub unsafe fn $name(s: &[$id]) -> usize { #[cfg(target_arch = "x86")] use arch::x86::*; #[cfg(target_arch = "x86_64")] use arch::x86_64::*; // The alignment requirements for 256-bit wide vectors is 32 bytes: const ALIGNMENT: usize = 32; let mut i: usize = $head!(s, $id, ALIGNMENT); // ^^^^^^ i is the index of the first element aligned to an // ALIGNMENT boundary let n = s.len(); let ap = |o| (s.as_ptr().offset(o as isize)) as *const __m256i; // Unroll factor: #of 256-bit vectors processed per loop iteration const NVECS: usize = 4; // #lanes in each 256-bit vector const NLANES: usize = $nlanes; // Stride: number of elements processed in each loop iteration // unroll_factor * #lane per vector const STRIDE: usize = NLANES * NVECS; // Minimum number of elements required for explicit vectorization. 
// Since we need one extra vector to get the last element, // this is #lanes * (unroll + 1) == stride + #lanes const MIN_LEN: usize = NLANES * (NVECS + 1); if (n - i) >= MIN_LEN { while i < n - STRIDE { let current = _mm256_load_si256(ap(i + 0 * NLANES)); // [a0,..,a7] let next0 = _mm256_load_si256(ap(i + 1 * NLANES)); // [a8,..,a16] let next1 = _mm256_load_si256(ap(i + 2 * NLANES)); // [a16,..,a23] let next2 = _mm256_load_si256(ap(i + 3 * NLANES)); // [a24,..,a31] let compare0 = _mm256_loadu_si256(ap(i + 0 * NLANES + 1)); // [a1,..,a8] let compare1 = _mm256_loadu_si256(ap(i + 1 * NLANES + 1)); // [a9,..,a16] let compare2 = _mm256_loadu_si256(ap(i + 2 * NLANES + 1)); // [a17,..,a23] let compare3 = _mm256_loadu_si256(ap(i + 3 * NLANES + 1)); // [a25,..,a32] // a <= b <=> a == minu(a,b): // [a0 <= a1,..,a7 <= a8] let mask0 = $cmpeq(current, $minu(current, compare0)); // [a8 <= a9,..,a15 <= a16] let mask1 = $cmpeq(next0, $minu(next0, compare1)); // [a16 <= a17,.., a23 <= a24] let mask2 = $cmpeq(next1, $minu(next1, compare2)); // [a24 <= a25,..,a31 <= a32] let mask3 = $cmpeq(next2, $minu(next2, compare3)); // mask = mask0 && mask1 && mask2 && mask3 let mask = _mm256_and_si256( _mm256_and_si256(mask0, mask1), _mm256_and_si256(mask2, mask3), ); // If the resulting mask has all bits set it means that all // the <= comparisons were succesfull: if _mm256_testc_si256(mask, _mm256_set1_epi64x(-1)) == 0 { return i; } i += STRIDE; } } $tail!(s, n, i) } }; } pub mod avx2 { // `_mm256_cmpeq_epi32` requires `AVX2` // `_mm256_min_epu32` requires `AVX2` unsigned_256!( is_sorted_lt_u32, "avx2", u32, 8, _mm256_cmpeq_epi32, _mm256_min_epu32, is_sorted_lt_until_alignment_boundary, is_sorted_lt_tail ); // `_mm256_cmpeq_epi16` requires `AVX2` // `_mm256_min_epu16` requires `AVX2` unsigned_256!( is_sorted_lt_u16, "avx2", u16, 16, _mm256_cmpeq_epi16, _mm256_min_epu16, is_sorted_lt_until_alignment_boundary, is_sorted_lt_tail ); // `_mm256_cmpeq_epi8` requires `AVX2` // `_mm256_min_epu8` requires `AVX2` unsigned_256!( is_sorted_lt_u8, "avx2", u8, 32, _mm256_cmpeq_epi8, _mm256_min_epu8, is_sorted_lt_until_alignment_boundary, is_sorted_lt_tail ); // `_mm256_cmpeq_epi32` requires `AVX2` // `_mm256_max_epu32` requires `AVX2` unsigned_256!( is_sorted_gt_u32, "avx2", u32, 8, _mm256_cmpeq_epi32, _mm256_max_epu32, is_sorted_gt_until_alignment_boundary, is_sorted_gt_tail ); // `_mm256_cmpeq_epi16` requires `AVX2` // `_mm256_max_epu16` requires `AVX2` unsigned_256!( is_sorted_gt_u16, "avx2", u16, 16, _mm256_cmpeq_epi16, _mm256_max_epu16, is_sorted_gt_until_alignment_boundary, is_sorted_gt_tail ); // `_mm256_cmpeq_epi8` requires `AVX2` // `_mm256_max_epu8` requires `AVX2` unsigned_256!( is_sorted_gt_u8, "avx2", u8, 32, _mm256_cmpeq_epi8, _mm256_max_epu8, is_sorted_gt_until_alignment_boundary, is_sorted_gt_tail ); }
true
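The SIMD kernels above rely on the identity that, for unsigned lanes, a <= b exactly when a == min(a, b) (and dually a >= b when a == max(a, b)), because older x86 has no unsigned greater-than compare. A scalar, target-feature-free check of that identity over every u8 pair:

// Scalar check of the unsigned-comparison trick used by the vector kernels:
// a <= b  <=>  a == min(a, b), and a >= b  <=>  a == max(a, b).
fn main() {
    for a in 0u8..=255 {
        for b in 0u8..=255 {
            assert_eq!(a <= b, a == a.min(b));
            assert_eq!(a >= b, a == a.max(b));
        }
    }
    println!("min/max comparison identity holds for all u8 pairs");
}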
400056a434a3db49674dfb21adf12c9615bed5a5
Rust
taxmeifyoucan/cadr-guide
/src/build_system/dotnet.rs
UTF-8
4,916
2.625
3
[]
no_license
use std::io; use std::path::Path; use super::ExecutableSuggestion; fn deserialize_ignore_any<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result<(), D::Error> { use serde::Deserialize; serde::de::IgnoredAny::deserialize(deserializer)?; Ok(()) } pub fn suggest_executables(source_dir: &Path) -> Vec<ExecutableSuggestion> { use io::BufRead; #[derive(serde_derive::Deserialize)] struct PropertyGroup { #[serde(rename = "OutputType", default)] output_type: Option<String>, #[serde(rename = "Description", default)] description: Option<String>, #[serde(rename = "AssemblyTitle", default)] title: Option<String>, } #[derive(serde_derive::Deserialize)] enum Item { PropertyGroup(PropertyGroup), #[serde(other, deserialize_with = "deserialize_ignore_any")] Other, } #[derive(serde_derive::Deserialize)] struct CSProj { #[serde(rename = "$value")] items: Vec<Item>, } let mut executables = Vec::new(); for file in std::fs::read_dir(&source_dir).expect("failed to list source directory") { let file = file.expect("Failed to get file in source directory"); let path = file.path(); if path.extension() == Some("sln".as_ref()) { let sln = std::fs::File::open(&path).expect("Failed to open solution file"); let sln = io::BufReader::new(sln); for line in sln.lines() { let line = line.expect("failed to read sln"); if line.starts_with("Project(") { let eqpos = line.find('=').expect("Project entry in sln missing `=`"); let mut parts = line[(eqpos + 1)..].split(','); let bin_name = parts.next().expect("Missing name"); let bin_name = &bin_name.trim()[1..]; let bin_name = &bin_name[..(bin_name.len() - 1)]; let path_with_end_quote = &parts.next().expect("missing path").trim()[1..]; let path = &path_with_end_quote[..(path_with_end_quote.len() - 1)]; if path.ends_with(".csproj") { let csproj_rel_path = path.replace('\\', "/"); let path = source_dir.join(&csproj_rel_path); let csproj_str = std::fs::read_to_string(&path) .unwrap_or_else(|error| panic!("failed to read {}: {}", path.display(), error)); let csproj_trimmed = if csproj_str.starts_with("\u{feff}") { &csproj_str[3..] } else { &csproj_str }.trim(); let csproj = serde_xml_rs::from_reader::<_, CSProj>(csproj_trimmed.as_bytes()) .expect("failed to load csproj"); let is_exe = csproj.items.iter().any(|group| { match group { Item::PropertyGroup(PropertyGroup { output_type: Some(output_type), .. }) => output_type == "Exe", _ => false, } }); let summary = csproj.items.iter().find_map(|item| { match item { Item::PropertyGroup(PropertyGroup { title, .. }) => title.as_ref(), _ => None, } }).map(Clone::clone); let long_doc = csproj.items.iter().find_map(|item| { match item { Item::PropertyGroup(PropertyGroup { description, .. }) => description.as_ref(), _ => None, } }).map(Clone::clone); let (summary, long_doc) = match (summary, long_doc) { (None, Some(description)) => (Some(description), None), tuple => tuple, }; if is_exe { let suggestion = ExecutableSuggestion { path: bin_name.to_owned(), is_path_relative: true, is_in_destdir: true, is_arch_dependent: true, skip_debug_symbols: true, summary, long_doc, csproj: Some(csproj_rel_path), }; executables.push(suggestion); } } } } } } executables }
true
ad38a7606f982282e7ff576ce0b7936b470b0810
Rust
HectorIGH/Competitive-Programming
/Leetcode Challenge/07_July_2020/Rust/Week 3/3_Top K Frequent Elements.rs
UTF-8
1,191
3.328125
3
[]
no_license
//Given a non-empty array of integers, return the k most frequent elements.
//
//Example 1:
//
//Input: nums = [1,1,1,2,2,3], k = 2
//Output: [1,2]
//Example 2:
//
//Input: nums = [1], k = 1
//Output: [1]
//Note:
//
//You may assume k is always valid, 1 ≤ k ≤ number of unique elements.
//Your algorithm's time complexity must be better than O(n log n), where n is the array's size.
//It's guaranteed that the answer is unique, in other words the set of the top k frequent elements is unique.
//You can return the answer in any order.

use std::collections::{HashMap, BinaryHeap};

impl Solution {
    pub fn top_k_frequent(nums: Vec<i32>, k: i32) -> Vec<i32> {
        if nums.len() == k as usize {
            return nums;
        }
        let mut ans: Vec<i32> = vec![0; k as usize];
        let mut freq = HashMap::new();
        let mut heap: BinaryHeap<(i32, i32)> = BinaryHeap::new();
        for n in &nums {
            *freq.entry(n).or_insert(0) += 1;
        }
        for (number, frequency) in freq {
            heap.push((frequency, *number));
        }
        for i in 0..k as usize {
            ans[i] = heap.pop().unwrap().1;
        }
        return ans;
    }
}
true
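The solution above pushes (frequency, value) pairs into a BinaryHeap and pops k times; this works because Rust tuples compare lexicographically, so the max-heap orders by frequency first and breaks ties on the value. A small standalone check of that ordering, with arbitrary example pairs:

use std::collections::BinaryHeap;

// (frequency, value) pairs in a max-heap pop highest frequency first,
// with ties broken by the larger value.
fn main() {
    let mut heap: BinaryHeap<(i32, i32)> = BinaryHeap::new();
    for pair in vec![(3, 1), (2, 2), (1, 3), (2, 5)] {
        heap.push(pair);
    }
    assert_eq!(heap.pop(), Some((3, 1)));
    assert_eq!(heap.pop(), Some((2, 5))); // tie on frequency 2: larger value first
    assert_eq!(heap.pop(), Some((2, 2)));
    assert_eq!(heap.pop(), Some((1, 3)));
    println!("max-heap on (frequency, value) tuples pops by frequency first");
}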
05bbdb4d96a553c485716978b22e3ca5c85593cb
Rust
ffahri/rust-server-finder
/src/main.rs
UTF-8
479
2.546875
3
[]
no_license
use reqwest::Error; #[tokio::main] async fn main() -> Result<(), Error> { println!("Hello, world!"); let client = reqwest::Client::new(); let resp = client.get("https://api.battlemetrics.com/servers") .query(&[ ("filter[game]", "rust"), ("filter[status]", "online"), ("filter[countries][]", "TR"), ("filter[players][min]", "50"), ]).send().await?.text().await?; println!("{}", resp); Ok(()) }
true
98bc2f413b64a934c5f686e7a168d8ec8f5204b4
Rust
wayslog/veda
/src/hmac/murmur3.rs
UTF-8
1,807
3.03125
3
[ "MIT" ]
permissive
const C1: u32 = 0x85eb_ca6b; const C2: u32 = 0xc2b2_ae35; const R1: u32 = 16; const R2: u32 = 13; const M: u32 = 5; const N: u32 = 0xe654_6b64; pub fn murmur3_32(source: &[u8], seed: u32) -> u32 { let buf = source.as_ref(); let mut processed = 0; let mut state = seed; let mut iter = buf.array_chunks::<4>(); while let Some(buffer) = iter.next() { processed += 4; let k = u32::from_le_bytes(*buffer); state ^= calc_k(k); state = state.rotate_left(R2); state = (state.wrapping_mul(M)).wrapping_add(N); } let buffer = iter.remainder(); match buffer.len() { 3 => { processed += 3; let k: u32 = ((buffer[2] as u32) << 16) | ((buffer[1] as u32) << 8) | (buffer[0] as u32); state ^= calc_k(k); } 2 => { processed += 2; let k: u32 = ((buffer[1] as u32) << 8) | (buffer[0] as u32); state ^= calc_k(k); } 1 => { processed += 1; let k: u32 = buffer[0] as u32; state ^= calc_k(k); } 0 => {} _ => panic!("Internal buffer state failure"), } finish(state, processed) } fn finish(state: u32, processed: u32) -> u32 { let mut hash = state; hash ^= processed as u32; hash ^= hash.wrapping_shr(R1); hash = hash.wrapping_mul(C1); hash ^= hash.wrapping_shr(R2); hash = hash.wrapping_mul(C2); hash ^= hash.wrapping_shr(R1); hash } fn calc_k(k: u32) -> u32 { const C1: u32 = 0xcc9e_2d51; const C2: u32 = 0x1b87_3593; const R1: u32 = 15; k.wrapping_mul(C1).rotate_left(R1).wrapping_mul(C2) } #[test] fn test_murmur3() { let x = murmur3_32("hello world".as_bytes(), 0); assert_eq!(x, 1586663183); }
true
d194d5959a6e18b6af4ae3578ae8fe3a1cb87eef
Rust
SINHASantos/grpc-rust
/grpc/src/bytesx/iter_buf.rs
UTF-8
1,616
3.078125
3
[ "MIT" ]
permissive
use bytes::Buf; use std::cmp; pub(crate) struct IterBuf<B: Buf, I: Iterator<Item = B>> { iter: I, next: Option<B>, rem: usize, } impl<B: Buf, I: Iterator<Item = B>> IterBuf<B, I> { pub fn new(iter: I, rem: usize) -> IterBuf<B, I> { let mut b = IterBuf { next: None, iter, rem, }; b.fill_next(); b } fn fill_next(&mut self) { loop { if let None = self.next { self.next = self.iter.next(); } match &mut self.next { None => return, Some(b) => { if b.has_remaining() { return; } } } } } } impl<B: Buf, I: Iterator<Item = B>> Buf for IterBuf<B, I> { fn remaining(&self) -> usize { self.rem } fn chunk(&self) -> &[u8] { match &self.next { Some(buf) => buf.chunk(), None => &[], } } fn advance(&mut self, cnt: usize) { while cnt != 0 { if let Some(buf) = &mut self.next { let min = cmp::min(cnt, buf.remaining()); buf.advance(min); self.rem -= min; if !buf.has_remaining() { self.next = None; } else { return; } } else { panic!("overflow"); } debug_assert!(self.next.is_none()); self.fill_next(); } } } #[cfg(test)] mod test {}
true
ccda2f0ea75886f16927526527d5bb5067500d9d
Rust
pcein/trust-rust
/code/interesting-crates/maplit-demo/src/main.rs
UTF-8
212
3.0625
3
[]
no_license
#[macro_use] extern crate maplit; fn main() { let fruits = hashmap! { "apple" => 100, "orange" => 120, "mango" => 130, }; println!("{:?}", fruits); }
true
a91db9607f04b0db156aa9874cf37dd1effdbf45
Rust
jtescher/aoc-2020
/src/day_14.rs
UTF-8
4,310
3.5
4
[]
no_license
use std::collections::HashMap; pub fn part_one(input: &str) -> anyhow::Result<usize> { let mut addresses = HashMap::new(); let mut mask = ""; for line in input.lines() { if line.starts_with("mask") { mask = line .splitn(2, " = ") .last() .ok_or(anyhow::anyhow!("expected mask, got {}", line))?; } else { let mut parts = line.split_terminator(" = "); let addr = parts .next() .and_then(|addr| { addr.trim_start_matches("mem[") .trim_end_matches("]") .parse::<usize>() .ok() }) .ok_or(anyhow::anyhow!("expected addr, got {}", line))?; let mut val = parts .next() .and_then(|val| val.parse::<usize>().ok()) .ok_or(anyhow::anyhow!("expected mem val, got {}", line))?; for (idx, c) in mask.chars().rev().enumerate() { if c == '1' { val = val | (1 << idx); } else if c == '0' { val = val & !(1 << idx); } } addresses.insert(addr, val); } } Ok(addresses.values().sum()) } pub fn part_two(input: &str) -> anyhow::Result<usize> { let mut addresses = HashMap::new(); let mut mask = ""; for line in input.lines() { if line.starts_with("mask") { mask = line .splitn(2, " = ") .last() .ok_or(anyhow::anyhow!("expected mask, got {}", line))?; } else { let mut parts = line.split_terminator(" = "); let mut addr = parts .next() .and_then(|addr| { addr.trim_start_matches("mem[") .trim_end_matches("]") .parse::<usize>() .ok() }) .ok_or(anyhow::anyhow!("expected addr, got {}", line))?; let val = parts .next() .and_then(|val| val.parse::<usize>().ok()) .ok_or(anyhow::anyhow!("expected mem val, got {}", line))?; let mut floating: Vec<Vec<(usize, usize)>> = vec![]; for (idx, c) in mask.chars().rev().enumerate() { if c == '1' { addr = addr | (1 << idx); } else if c == 'X' { if floating.is_empty() { floating.push(vec![(idx, 0)]); floating.push(vec![(idx, 1)]); } else { let mut floating2 = floating.clone(); for i in &mut floating { i.push((idx, 0)); } for j in &mut floating2 { j.push((idx, 1)); } floating.append(&mut floating2); } } } addresses.insert(addr, val); for addr_variant in floating { let mut new_addr = addr.clone(); for (idx, num) in addr_variant { if num == 1 { new_addr = new_addr | (1 << idx); } else { new_addr = new_addr & !(1 << idx); } } addresses.insert(new_addr, val); } } } Ok(addresses.values().sum()) } #[cfg(test)] mod tests { use super::*; #[test] fn examples() { let input = r"mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] = 101 mem[8] = 0"; assert_eq!(part_one(input).unwrap(), 165); let input = r"mask = 000000000000000000000000000000X1001X mem[42] = 100 mask = 00000000000000000000000000000000X0XX mem[26] = 1"; assert_eq!(part_two(input).unwrap(), 208); } #[test] fn real_input() { let input = include_str!("../data/day_14.txt"); assert_eq!(part_one(input).unwrap(), 8570568288597); assert_eq!(part_two(input).unwrap(), 3289441921203); } }
true
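Part one's mask handling above is two bit operations per mask character: '1' forces a bit on with val | (1 << idx), '0' forces it off with val & !(1 << idx), and 'X' leaves the bit alone. A standalone sketch that reproduces the sample quoted in the tests (11 written through the mask becomes 73):

// Apply a version-1 mask: '1' sets the bit, '0' clears it, 'X' passes it through.
fn apply_mask(mask: &str, mut val: u64) -> u64 {
    for (idx, c) in mask.chars().rev().enumerate() {
        match c {
            '1' => val |= 1 << idx,
            '0' => val &= !(1 << idx),
            _ => {} // 'X' keeps the original bit
        }
    }
    val
}

fn main() {
    let mask = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X";
    // 11 = 0b001011; the mask sets bit 6 (64) and clears bit 1 (2), giving 73.
    assert_eq!(apply_mask(mask, 11), 73);
    assert_eq!(apply_mask(mask, 101), 101); // bit 6 already set, bit 1 already clear
    assert_eq!(apply_mask(mask, 0), 64);
    println!("mask examples match the sample in the tests above");
}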
253dc74faef6c537d2ab04c8140b725e65f9fb30
Rust
Larusso/unity-version-manager
/uvm_core/src/unity/hub/mod.rs
UTF-8
1,601
2.84375
3
[ "Apache-2.0" ]
permissive
pub mod editors;
pub mod paths;

use std::io;

use crate::unity;
use thiserror::Error;

//
#[derive(Error, Debug)]
pub enum UvmHubError {
    #[error("unity hub config: '{0}' is missing")]
    ConfigNotFound(String),

    #[error("Unity Hub config directory missing")]
    ConfigDirectoryNotFound,

    #[error("failed to read Unity Hub config {config}")]
    ReadConfigError {
        config: String,
        source: anyhow::Error,
    },

    #[error("can't write Unity Hub config: '{config}'")]
    WriteConfigError {
        config: String,
        source: anyhow::Error,
    },

    #[error("failed to create config directory")]
    FailedToCreateConfigDirectory {
        source: std::io::Error,
    },

    #[error("failed to create config file for config {config}")]
    FailedToCreateConfig { config: String, source: io::Error },

    #[error("Unity Hub editor install path not found")]
    InstallPathNotFound,
}

type Result<T> = std::result::Result<T, UvmHubError>;

pub fn list_installations() -> Result<unity::Installations> {
    let install_path = paths::install_path()
        .ok_or_else(|| UvmHubError::InstallPathNotFound)?;
    debug!("unity hub install path: {}", install_path.display());

    let editors = editors::Editors::load()?;
    debug!("raw editors map: {:?}", editors);
    let editors = unity::Installations::from(editors);

    if let Ok(installations) = unity::list_installations_in_dir(&install_path) {
        let iter = installations.chain(editors);
        return Ok(unity::Installations(Box::new(iter)));
    }

    Ok(editors)
}
true
ac14af1cef4716e639f93625345bf3967ffb28bf
Rust
rust-rosetta/rust-rosetta
/tasks/count-in-octal/src/main.rs
UTF-8
135
3.171875
3
[ "Unlicense" ]
permissive
use std::u8;

fn main() {
    // We count from 0 to 255 (377 in octal)
    for i in 0..=u8::MAX {
        println!("{:o}", i);
    }
}
true
86b3a27f4e34ea2e06bd62d6f4cd8116b59fcfe6
Rust
gleam-lang/gleam
/compiler-cli/src/export.rs
UTF-8
2,976
2.59375
3
[ "Apache-2.0" ]
permissive
use gleam_core::{ build::{Codegen, Mode, Options, Target}, Result, }; // TODO: start in embedded mode // TODO: test /// Generate a directory of precompiled Erlang along with a start script. /// Suitable for deployment to a server. /// /// For each Erlang application (aka package) directory these directories are /// copied across: /// - ebin /// - include /// - priv pub(crate) fn erlang_shipment() -> Result<()> { let paths = crate::project_paths_at_current_directory(); let target = Target::Erlang; let mode = Mode::Prod; let build = paths.build_directory_for_target(mode, target); let out = paths.erlang_shipment_directory(); crate::fs::mkdir(&out)?; // Reset the directories to ensure we have a clean slate and no old code crate::fs::delete_dir(&build)?; crate::fs::delete_dir(&out)?; // Build project in production mode let built = crate::build::main( Options { warnings_as_errors: false, codegen: Codegen::All, mode, target: Some(target), }, crate::build::download_dependencies()?, )?; for entry in crate::fs::read_dir(&build)?.filter_map(Result::ok) { let path = entry.path(); // We are only interested in package directories if !path.is_dir() { continue; } let name = path.file_name().expect("Directory name"); let build = build.join(name); let out = out.join(name); crate::fs::mkdir(&out)?; // Copy desired package subdirectories for subdirectory in ["ebin", "priv", "include"] { let source = build.join(subdirectory); if source.is_dir() { let source = crate::fs::canonicalise(&source)?; crate::fs::copy_dir(source, &out)?; } } } // Write entrypoint script let entrypoint = out.join("entrypoint.sh"); let text = include_str!("../templates/erlang-shipment-entrypoint.sh") .replace("$PACKAGE_NAME_FROM_GLEAM", &built.root_package.config.name); crate::fs::write(&entrypoint, &text)?; crate::fs::make_executable(&entrypoint)?; crate::cli::print_exported(&built.root_package.config.name); println!( " Your Erlang shipment has been generated to {path}. It can be copied to a compatible server with Erlang installed and run with the entrypoint.sh script. {entrypoint} ", path = out, entrypoint = entrypoint, ); Ok(()) } pub fn hex_tarball() -> Result<()> { let paths = crate::project_paths_at_current_directory(); let config = crate::config::root_config()?; let data: Vec<u8> = crate::publish::build_hex_tarball(&paths, &config)?; let path = paths.build_export_hex_tarball(&config.name, &config.version.to_string()); crate::fs::write_bytes(&path, &data)?; println!( " Your hex tarball has been generated in {}. ", &path ); Ok(()) }
true
5f97a2439ceec0ae964b9752bde5a60bf59395ab
Rust
richiprieto/Rust_examples
/Cap5/tuple_structs/src/main.rs
UTF-8
464
3.609375
4
[]
no_license
fn main() {
    // Tuple structs do not have names associated with their fields;
    // they only have types. They are useful when you need to give the
    // tuple a name and make it a type distinct from other specific tuples.
    struct Color(i32, i32, i32);
    struct Point(i32, i32, i32);

    let black = Color(1, 0, 0);
    let origin = Point(0, 0, 0);

    // Fields can be accessed using "." followed by the index
    println!("black at 0 is {}", black.0)
}
true
bd89b53829df1913aa0ccd0a211337db8c3d1230
Rust
COLDTURNIP/raphanus_leetcode
/rust/src/p950.rs
UTF-8
2,614
3.859375
4
[]
no_license
/* Problem 950. Reveal Cards In Increasing Order ============================================= https://leetcode.com/problems/reveal-cards-in-increasing-order/ In a deck of cards, every card has a unique integer. You can order the deck in any order you want. Initially, all the cards start face down (unrevealed) in one deck. Now, you do the following steps repeatedly, until all cards are revealed: - Take the top card of the deck, reveal it, and take it out of the deck. - If there are still cards in the deck, put the next top card of the deck at the bottom of the deck. - If there are still unrevealed cards, go back to step 1. Otherwise, stop. - Return an ordering of the deck that would reveal the cards in increasing order. The first entry in the answer is considered to be the top of the deck. Example 1: Input: [17,13,11,2,3,5,7] Output: [2,13,3,11,5,17,7] Explanation: We get the deck in the order [17,13,11,2,3,5,7] (this order doesn't matter), and reorder it. After reordering, the deck starts as [2,13,3,11,5,17,7], where 2 is the top of the deck. We reveal 2, and move 13 to the bottom. The deck is now [3,11,5,17,7,13]. We reveal 3, and move 11 to the bottom. The deck is now [5,17,7,13,11]. We reveal 5, and move 17 to the bottom. The deck is now [7,13,11,17]. We reveal 7, and move 13 to the bottom. The deck is now [11,17,13]. We reveal 11, and move 17 to the bottom. The deck is now [13,17]. We reveal 13, and move 17 to the bottom. The deck is now [17]. We reveal 17. Since all the cards revealed are in increasing order, the answer is correct. Note: - 1 <= A.length <= 1000 - 1 <= A[i] <= 10^6 - A[i] != A[j] for all i != j */ use std::collections::VecDeque; impl Solution { pub fn deck_revealed_increasing(mut deck: Vec<i32>) -> Vec<i32> { deck.sort_unstable(); let mut ans = VecDeque::new(); for n in deck.into_iter().rev() { if let Some(tail) = ans.pop_back() { ans.push_front(tail); } ans.push_front(n); } ans.into() } } pub struct Solution; #[cfg(test)] mod tests { extern crate test; use super::Solution; #[test] fn test_reveal() { assert_eq!( Solution::deck_revealed_increasing(vec![17, 13, 11, 2, 3, 5, 7]), vec![2, 13, 3, 11, 5, 17, 7] ); } #[bench] fn bench(b: &mut test::Bencher) { b.iter(|| Solution::deck_revealed_increasing(vec![17, 13, 11, 2, 3, 5, 7])); } }
true
dc0486acba9b7c319f58f22057b6e725a37f075b
Rust
jonlamb-gh/openscad-models-rust
/wood-projects/couch/src/board.rs
UTF-8
1,919
2.96875
3
[ "MIT" ]
permissive
use dimdraw::{ObjectAssembler, ObjectDescriptor}; use scad::*; use board_dimensions::BoardDimensions; pub struct Board { dimensions: BoardDimensions, color: Option<String>, } impl Board { pub fn new(length: f32, width: f32, thickness: f32, color: Option<&'static str>) -> Self { let mc = if let Some(c) = color { Some(c.to_string()) } else { None }; Self { dimensions: BoardDimensions::new(length, width, thickness), color: mc, } } pub fn from_array(size: &[f32; 3], color: Option<&'static str>) -> Self { Self::new(size[0], size[1], size[2], color) } pub fn dims(&self) -> &BoardDimensions { &self.dimensions } } impl ObjectAssembler for Board { fn describe(&self) -> ObjectDescriptor { ObjectDescriptor { length: self.dimensions.length(), width: self.dimensions.width(), thickness: self.dimensions.thickness(), } } fn has_color(&self) -> bool { if let Some(_) = self.color { true } else { false } } fn object_color(&self) -> ScadObject { if let Some(ref c) = self.color { scad!(NamedColor(c.to_string())) } else { scad!(Color(vec3(0.0, 0.0, 0.0))) } } fn assemble(&self) -> ScadObject { if self.has_color() { let mut color_obj = self.object_color(); color_obj.add_child(scad!(Cube(vec3( self.dimensions.length(), self.dimensions.width(), self.dimensions.thickness(), )))); color_obj } else { scad!(Cube(vec3( self.dimensions.length(), self.dimensions.width(), self.dimensions.thickness(), ))) } } }
true
23ab0a371542550d5f758d2951f88a517e8c2480
Rust
purplg/orrient
/src/api/endpoints.rs
UTF-8
1,589
2.6875
3
[ "MIT" ]
permissive
use super::{AccountAchievement, Achievement, AllAccountAchievements, AllAchievementIDs, Dailies}; /// Represents how and where to access the requested data pub trait Endpoint<P> { /// Whether the endpoint requires an API key from the user const AUTHENTICATED: bool; /// Build a url path to the endpoint from the provided parameters fn get_path(param: Vec<&P>) -> String; } impl Endpoint<()> for AllAchievementIDs { const AUTHENTICATED: bool = false; fn get_path(_: Vec<&()>) -> String { "v2/achievements".to_string() } } impl Endpoint<usize> for Achievement { const AUTHENTICATED: bool = false; fn get_path(ids: Vec<&usize>) -> String { format!( "v2/achievements?ids={}", ids.iter() .map(|id| id.to_string()) .collect::<Vec<String>>() .join(",") ) } } impl Endpoint<usize> for AccountAchievement { const AUTHENTICATED: bool = true; fn get_path(ids: Vec<&usize>) -> String { format!( "v2/account/achievements?ids={}", ids.iter() .map(|id| id.to_string()) .collect::<Vec<String>>() .join(",") ) } } impl Endpoint<()> for AllAccountAchievements { const AUTHENTICATED: bool = true; fn get_path(_: Vec<&()>) -> String { "v2/account/achievements".to_string() } } impl Endpoint<()> for Dailies { const AUTHENTICATED: bool = false; fn get_path(_: Vec<&()>) -> String { "v2/achievements/daily".to_string() } }
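// Editor's sketch (not part of the original file): how `get_path` composes a
// query string from several IDs. The expected values follow directly from the
// `format!`/`join` logic above; the module and test names are illustrative.
#[cfg(test)]
mod path_examples {
    use super::*;

    #[test]
    fn paths_are_built_from_ids() {
        assert_eq!(Achievement::get_path(vec![&7, &42]), "v2/achievements?ids=7,42");
        assert_eq!(AllAchievementIDs::get_path(vec![]), "v2/achievements");
    }
}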
true
fab951f36b616653c0c97c83f3dd3abbb4ceef07
Rust
hubert-levangong/Rust
/CC/Challenge5-bis/src/main.rs
UTF-8
2,514
3.03125
3
[]
no_license
use std::fs;
use std::io::{Read, Write};
use base64::decode_config;

//
// Additional testing to confirm
// the complete chain from loading a Base64'ed binary file
// all the way to decoding it using a multiple-byte key
//

fn main() {
    let filename = String::from("/Users/hubert/Documents/Github/Rust/CC/Challenge6/src/5-decoded.txt");
    let mut fp = fs::File::open(filename).expect("Could not open the file");
    let mut hexstring = String::new();
    fp.read_to_string(&mut hexstring).expect("Could not load the file");
    println!("Read: {}", hexstring);

    let rawdata = hex::decode(hexstring).expect("Failed to decode string.");
    println!("raw data: {:?}", rawdata);

    let fileout = String::from("/Users/hubert/Documents/Github/Rust/CC/Challenge6/src/5-bin.txt");
    let mut fp2 = fs::File::create(fileout).expect("Could not open the file");
    fp2.write_all(&rawdata).expect("Failed to write to file");
    fp2.flush().expect("Failed to flush the output file");
    println!("Binary file from pretty-printed hex string generated and saved.");

    let clef = Vec::from("ICE");
    let mut res: Vec<u8> = Vec::new();
    encrypt_data_with_code(&rawdata, clef, &mut res);
    let result = String::from_utf8(res).expect("Could not turn result into a String");
    println!("result:\n{}", result);

    println!("\nDecoding Base64'ed encrypted file...");
    let filename2 = String::from("/Users/hubert/Documents/Github/Rust/CC/Challenge6/src/5-binB64.txt");
    let mut fp3 = fs::File::open(filename2).expect("Could not open Base64'ed binary file.");
    let mut b64inp = String::new();
    fp3.read_to_string(&mut b64inp).expect("Failed to read the Base64'ed binary file.");
    let b64inpfixed = b64inp.replace("\n", "");
    let rawdata2 = decode_config(&b64inpfixed, base64::STANDARD).expect("Error during Base64 decoding");

    let clef2 = Vec::from("ICE");
    let mut res2: Vec<u8> = Vec::new();
    encrypt_data_with_code(&rawdata2, clef2, &mut res2);
    let result2 = String::from_utf8(res2).expect("Could not turn result into a String");
    println!("result:\n{}", result2);
}

// Encrypt data using a multi-byte repeating key (XOR)
fn encrypt_data_with_code(input: &Vec<u8>, code: Vec<u8>, output: &mut Vec<u8>) -> bool {
    if !output.is_empty() {
        return false;
    }
    let keylength = code.len();
    for (i, &item) in input.iter().enumerate() {
        let element = item ^ code[i % keylength];
        output.push(element);
    }
    true
}
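// Editor's addition (illustrative): repeating-key XOR is its own inverse, so
// applying `encrypt_data_with_code` twice with the same key must return the
// original input. This self-check exercises the function without relying on
// the hard-coded file paths above.
#[cfg(test)]
mod xor_roundtrip {
    use super::encrypt_data_with_code;

    #[test]
    fn double_xor_restores_input() {
        let plain = Vec::from("Burning 'em, if you ain't quick and nimble");
        let mut once: Vec<u8> = Vec::new();
        assert!(encrypt_data_with_code(&plain, Vec::from("ICE"), &mut once));
        let mut twice: Vec<u8> = Vec::new();
        assert!(encrypt_data_with_code(&once, Vec::from("ICE"), &mut twice));
        assert_eq!(twice, plain);
    }
}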
true
37726f1fbdd9dbc387e849405bc3a5e7c7b58fdc
Rust
arun11299/Rust-Practice
/concurrency4.rs
UTF-8
858
3.015625
3
[]
no_license
use std::thread; use std::sync::{Mutex, Arc}; fn main() { let m = Arc::new(Mutex::new(0)); let m1 = Arc::clone(&m); let m2 = Arc::clone(&m1); let handle1 = thread::spawn(move || { loop { let mut num = m1.lock().unwrap(); *num = *num + 1; println!("Thread-1 value: {}", *num); if *num > 100 { break; } } }); let handle2 = thread::spawn(move || { loop { let mut num = m2.lock().unwrap(); *num = *num + 1; println!("Thread-2 value: {}", *num); if *num > 100 { break; } } }); println!("Shared value: {}", *m.lock().unwrap()); handle1.join().unwrap(); handle2.join().unwrap(); println!("Shared value: {}", *m.lock().unwrap()); }
true
f3f22c5a946ac7dde7ee73a28463ab6325bd1255
Rust
myarchsource/rust
/vendor/cookie_store/src/cookie_expiration.rs
UTF-8
7,367
3.234375
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT", "LicenseRef-scancode-other-permissive", "BSD-3-Clause", "BSD-2-Clause", "NCSA" ]
permissive
use std; use std::ops::Deref; use serde::{Deserialize, Serialize}; use time::{self, Tm}; #[derive(PartialEq, Eq, Clone, Debug, Hash, PartialOrd, Ord)] pub struct SerializableTm(Tm); impl Deref for SerializableTm { type Target = time::Tm; fn deref(&self) -> &Self::Target { &self.0 } } impl From<Tm> for SerializableTm { fn from(tm: Tm) -> SerializableTm { SerializableTm(tm) } } /// When a given `Cookie` expires #[derive(PartialEq, Eq, Clone, Debug, Hash, PartialOrd, Ord, Serialize, Deserialize)] pub enum CookieExpiration { /// `Cookie` expires at the given UTC time, as set from either the Max-Age /// or Expires attribute of a Set-Cookie header AtUtc(SerializableTm), /// `Cookie` expires at the end of the current `Session`; this means the cookie /// is not persistent SessionEnd, } impl CookieExpiration { /// Indicates if the `Cookie` is expired as of *now*. pub fn is_expired(&self) -> bool { self.expires_by(&time::now_utc()) } /// Indicates if the `Cookie` expires as of `utc_tm`. pub fn expires_by(&self, utc_tm: &Tm) -> bool { match *self { CookieExpiration::AtUtc(ref expire_tm) => **expire_tm <= *utc_tm, CookieExpiration::SessionEnd => false, } } } impl From<u64> for CookieExpiration { fn from(max_age: u64) -> CookieExpiration { // If delta-seconds is less than or equal to zero (0), let expiry-time // be the earliest representable date and time. Otherwise, let the // expiry-time be the current date and time plus delta-seconds seconds. let utc_tm = if 0 == max_age { time::at_utc(time::Timespec::new(0, 0)) } else { // make sure we don't trigger a panic! in Duration by restricting the seconds // to the max let max_age = std::cmp::min(time::Duration::max_value().num_seconds() as u64, max_age); let utc_tm = time::now_utc() + time::Duration::seconds(max_age as i64); match time::strptime(&format!("{}", utc_tm.rfc3339()), "%Y-%m-%dT%H:%M:%SZ") { Ok(utc_tm) => utc_tm, Err(_) => time::strptime("9999-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ") .expect("unable to strptime maximum value"), } }; CookieExpiration::from(utc_tm) } } impl From<time::Tm> for CookieExpiration { fn from(utc_tm: Tm) -> CookieExpiration { // format & re-parse the Tm to make sure de/serialization is consistent let utc_tm = match time::strptime(&format!("{}", utc_tm.rfc3339()), "%Y-%m-%dT%H:%M:%SZ") { Ok(utc_tm) => utc_tm, Err(_) => time::strptime("9999-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ") .expect("unable to strptime maximum value"), }; CookieExpiration::AtUtc(SerializableTm::from(utc_tm)) } } impl From<time::Duration> for CookieExpiration { fn from(duration: time::Duration) -> Self { // If delta-seconds is less than or equal to zero (0), let expiry-time // be the earliest representable date and time. Otherwise, let the // expiry-time be the current date and time plus delta-seconds seconds. 
let utc_tm = if duration.is_zero() { time::at_utc(time::Timespec::new(0, 0)) } else { time::now_utc() + duration }; CookieExpiration::from(utc_tm) } } #[cfg(test)] mod tests { use super::CookieExpiration; use time; use crate::utils::test::*; #[test] fn max_age_bounds() { match CookieExpiration::from(time::Duration::max_value().num_seconds() as u64 + 1) { CookieExpiration::AtUtc(_) => assert!(true), _ => assert!(false), } } #[test] fn expired() { let ma = CookieExpiration::from(0u64); // Max-Age<=0 indicates the cookie is expired assert!(ma.is_expired()); assert!(ma.expires_by(&in_days(-1))); } #[test] fn max_age() { let ma = CookieExpiration::from(60u64); assert!(!ma.is_expired()); assert!(ma.expires_by(&in_minutes(2))); } #[test] fn session_end() { // SessionEnd never "expires"; lives until end of session let se = CookieExpiration::SessionEnd; assert!(!se.is_expired()); assert!(!se.expires_by(&in_days(1))); assert!(!se.expires_by(&in_days(-1))); } #[test] fn at_utc() { { let expire_tmrw = CookieExpiration::from(in_days(1)); assert!(!expire_tmrw.is_expired()); assert!(expire_tmrw.expires_by(&in_days(2))); } { let expired_yest = CookieExpiration::from(in_days(-1)); assert!(expired_yest.is_expired()); assert!(!expired_yest.expires_by(&in_days(-2))); } } } mod serde_serialization { use super::SerializableTm; use serde; use serde::de::{Deserializer, Visitor}; use std::fmt; use time; impl serde::Serialize for SerializableTm { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { serializer.serialize_str(&format!("{}", self.0.rfc3339())) } } impl<'a> serde::Deserialize<'a> for SerializableTm { fn deserialize<D>(deserializer: D) -> Result<SerializableTm, D::Error> where D: Deserializer<'a>, { deserializer.deserialize_str(TmVisitor) } } struct TmVisitor; impl<'a> Visitor<'a> for TmVisitor { type Value = SerializableTm; fn visit_str<E>(self, str_data: &str) -> Result<SerializableTm, E> where E: serde::de::Error, { time::strptime(str_data, "%Y-%m-%dT%H:%M:%SZ") .map(SerializableTm::from) .map_err(|_| { E::custom(format!( "could not parse '{}' as a UTC time in RFC3339 format", str_data )) }) } fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("datetime") } } #[cfg(test)] mod tests { use crate::cookie_expiration::CookieExpiration; use serde_json; use time; fn encode_decode(ce: &CookieExpiration, exp_json: &str) { let encoded = serde_json::to_string(ce).unwrap(); assert!( exp_json == encoded, "expected: '{}'\n encoded: '{}'", exp_json, encoded ); let decoded: CookieExpiration = serde_json::from_str(&encoded).unwrap(); assert!( *ce == decoded, "expected: '{:?}'\n decoded: '{:?}'", ce, decoded ); } #[test] fn serde() { let at_utc = time::strptime("2015-08-11T16:41:42Z", "%Y-%m-%dT%H:%M:%SZ").unwrap(); encode_decode( &CookieExpiration::from(at_utc), "{\"AtUtc\":\"2015-08-11T16:41:42Z\"}", ); encode_decode(&CookieExpiration::SessionEnd, "\"SessionEnd\""); } } }
true
3977eb1e4154c9ce33d2e561940e7fb68eb7fac7
Rust
lukaspustina/ifconfig-rs
/src/fairings.rs
UTF-8
1,100
2.671875
3
[ "MIT" ]
permissive
use rocket::{Data, Request, Response}; use rocket::fairing::{Fairing, Info, Kind}; use std::str::FromStr; use std::net::IpAddr; use std::net::SocketAddr; #[derive(Default)] pub struct HerokuForwardedFor; impl Fairing for HerokuForwardedFor { fn info(&self) -> Info { Info { name: "Set the request remote to Heroku's X-Forwarded-For", kind: Kind::Request | Kind::Response, } } fn on_request(&self, request: &mut Request, _: &Data) { let new_remote = if let Some(xfr) = request.headers().get_one("X-Forwarded-For") { if let Some(remote) = request.remote() { if let Ok(ip) = IpAddr::from_str(xfr) { Some(SocketAddr::new(ip, remote.port())) } else { None } } else { None } } else { None }; if let Some(remote) = new_remote { request.set_remote(remote); } } fn on_response(&self, _: &Request, _: &mut Response) { return; } }
true
166ebc25dfc1a8f0daa70ea6c3cdd8e1ad0d35cb
Rust
prataprc/gist
/rs/partial_ord.rs
UTF-8
208
3.21875
3
[ "MIT" ]
permissive
#[derive(PartialEq,PartialOrd,Default,Debug)] struct X { a: u32, b: u32, } fn main() { let x = X{a: 10, b: 20}; let y = X{a: 10, b: 20}; println!("{:?}", x); println!("{}", x < y); }
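// Editor's note (illustrative, not in the original snippet): the derived
// `PartialOrd` compares fields in declaration order, so `a` is decisive and
// `b` is only consulted on a tie. The assertions follow from the standard
// derive semantics.
#[cfg(test)]
mod derived_ordering {
    use super::X;

    #[test]
    fn fields_compare_in_declaration_order() {
        // `a` differs, so the larger `b` on the left does not matter.
        assert!(X { a: 1, b: 100 } < X { a: 2, b: 0 });
        // `a` ties, so the comparison falls through to `b`.
        assert!(X { a: 1, b: 1 } < X { a: 1, b: 2 });
    }
}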
true
cac472a5c0abfc476bc427a8438d487c51550fda
Rust
sanderhahn/adventofcode2020-rs
/examples/day05b.rs
UTF-8
1,021
3.296875
3
[]
no_license
use std::{fs::File, io::BufRead, io::BufReader, io::Error};

fn parse_num(str: &str) -> u32 {
    let mut num = 0;
    for c in str.chars() {
        num <<= 1;
        num |= match c {
            'F' => 0,
            'B' => 1,
            'R' => 1,
            'L' => 0,
            c => panic!("invalid input {}", c),
        };
    }
    num
}

fn find_seat(seats: Vec<u32>) -> u32 {
    // The seats are sorted, so the answer is one more than the seat that
    // precedes the single gap in the sequence of IDs. Iterating over adjacent
    // pairs up to `len() - 1` keeps the index access in bounds.
    for i in 0..seats.len() - 1 {
        if seats[i] + 1 != seats[i + 1] {
            return seats[i] + 1;
        }
    }
    panic!("not found");
}

fn main() -> Result<(), Error> {
    let file = File::open("inputs/day5.txt")?;
    let lines = BufReader::new(file).lines().map(|line| line.unwrap());

    assert_eq!(parse_num("FBFBBFFRLR"), 357);
    assert_eq!(parse_num("BFFFBBFRRR"), 567);
    assert_eq!(parse_num("FFFBBBFRRR"), 119);
    assert_eq!(parse_num("BBFFBBFRLL"), 820);

    let mut seats: Vec<u32> = lines.map(|line| parse_num(&line)).collect();
    seats.sort();

    println!("{}", find_seat(seats));
    Ok(())
}
true
dc4a2407696acaee71594c6f3d87e87773b15650
Rust
newpavlov/rustrush-crypto-workshop
/pwhash/src/main.rs
UTF-8
1,002
2.9375
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
//! #Examples //! ```sh //! rustrush-pwhash pbkdf2 my_salt my_password //! ceQDgNQEkik+bmUXyPZUsuvP0xTJaAcWSJvRR4Ryb2I= //! ``` //! //! ```sh //! rustrush-pwhash argon2 my_salt my_password //! 0ebFEUWTY+Y+FaTeA/jbIP4Ofc83lnlk76Bol+CkPk8= //! ``` use structopt::StructOpt; use hmac::Hmac; use sha2::Sha256; mod cli; use self::cli::Cli; /// Compute password hash using PBKDF2-HMAC-SHA256 fn run_pbkdf2( password: &str, salt: &[u8], iterations: usize, length: usize, ) -> Vec<u8> { // TODO vec![] } /// Compute password hash using Argon2i fn run_argon2(salt: &str, password: &str) -> Vec<u8> { // TODO vec![] } fn main() { let opt = cli::Cli::from_args(); let hash = match &opt { Cli::Pbkdf2 { salt, password, iterations, length } => { run_pbkdf2(password, salt.as_bytes(), *iterations, *length) }, Cli::Argon2 { password, salt } => { run_argon2(password, salt) }, }; println!("{}", base64::encode(&hash)); }
true
c983cb5f59c361d7b7513f4936491b42e6ff9634
Rust
Symforian/University
/Rust/List_1/T4/square_area_to_circle.rs
UTF-8
712
3.546875
4
[]
no_license
fn square_area_to_circle(size:f64) -> f64 { let r = size.sqrt()/2f64; std::f64::consts::PI*r*r } fn main() { square_area_to_circle(5.0); } #[cfg(test)] mod tests { use super::square_area_to_circle; #[test] fn test1() { assert_eq!(square_area_to_circle(0.0), 0.0); } #[test] fn test2() { assert_eq!(square_area_to_circle(11.0), 8.63937979737193); } #[test] fn test3() { assert_eq!(square_area_to_circle(22.0), 17.278759594743864); } #[test] fn test4() { assert_eq!(square_area_to_circle(45.0), 35.34291735288517); } #[test] fn test5() { assert_eq!(square_area_to_circle(30.0), 23.56194490192345); } }
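// Editor's note (illustrative, not part of the original solution): with side
// s = sqrt(size), the inscribed circle has radius r = s / 2, so its area is
// pi * r^2 = pi * (sqrt(size) / 2)^2 = pi * size / 4. The check below
// confirms the simplified form agrees with the implementation above up to
// floating-point error.
#[cfg(test)]
mod simplified_form {
    use super::square_area_to_circle;

    #[test]
    fn matches_pi_times_size_over_four() {
        for &size in &[0.0, 5.0, 11.0, 45.0] {
            let simplified = std::f64::consts::PI * size / 4.0;
            assert!((square_area_to_circle(size) - simplified).abs() < 1e-9);
        }
    }
}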
true
fc9dfed70f648924de06af58fac58c74c4f2b8e1
Rust
getsentry/symbolic
/symbolic-debuginfo/src/base.rs
UTF-8
30,288
3.09375
3
[ "MIT", "Apache-2.0" ]
permissive
use std::borrow::Cow; use std::fmt; use std::iter::FromIterator; use std::ops::{Bound, Deref, RangeBounds}; use std::str::FromStr; use symbolic_common::{clean_path, join_path, Arch, CodeId, DebugId, Name}; use crate::sourcebundle::SourceFileDescriptor; pub(crate) trait Parse<'data>: Sized { type Error; fn parse(data: &'data [u8]) -> Result<Self, Self::Error>; fn test(data: &'data [u8]) -> bool { Self::parse(data).is_ok() } } /// An error returned for unknown or invalid `ObjectKinds`. #[derive(Debug)] pub struct UnknownObjectKindError; impl fmt::Display for UnknownObjectKindError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "unknown object class") } } impl std::error::Error for UnknownObjectKindError {} /// Represents the designated use of the object file and hints at its contents. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)] pub enum ObjectKind { /// There is no object class specified for this object file. None, /// The Relocatable file type is the format used for intermediate object /// files. It is a very compact format containing all its sections in one /// segment. The compiler and assembler usually create one Relocatable file /// for each source code file. By convention, the file name extension for /// this format is .o. Relocatable, /// The Executable file type is the format used by standard executable /// programs. Executable, /// The Library file type is for dynamic shared libraries. It contains /// some additional tables to support multiple modules. By convention, the /// file name extension for this format is .dylib, except for the main /// shared library of a framework, which does not usually have a file name /// extension. Library, /// The Dump file type is used to store core files, which are /// traditionally created when a program crashes. Core files store the /// entire address space of a process at the time it crashed. You can /// later run gdb on the core file to figure out why the crash occurred. Dump, /// The Debug file type designates files that store symbol information /// for a corresponding binary file. Debug, /// A container that just stores source code files, but no other debug /// information corresponding to the original object file. Sources, /// The Other type represents any valid object class that does not fit any /// of the other classes. These are mostly CPU or OS dependent, or unique /// to a single kind of object. Other, } impl ObjectKind { /// Returns the name of the object kind. pub fn name(self) -> &'static str { match self { ObjectKind::None => "none", ObjectKind::Relocatable => "rel", ObjectKind::Executable => "exe", ObjectKind::Library => "lib", ObjectKind::Dump => "dump", ObjectKind::Debug => "dbg", ObjectKind::Sources => "src", ObjectKind::Other => "other", } } /// Returns a human readable name of the object kind. 
/// /// This is also used in alternate formatting: /// /// ```rust /// # use symbolic_debuginfo::ObjectKind; /// assert_eq!(format!("{:#}", ObjectKind::Executable), ObjectKind::Executable.human_name()); /// ``` pub fn human_name(self) -> &'static str { match self { ObjectKind::None => "file", ObjectKind::Relocatable => "object", ObjectKind::Executable => "executable", ObjectKind::Library => "library", ObjectKind::Dump => "memory dump", ObjectKind::Debug => "debug companion", ObjectKind::Sources => "sources", ObjectKind::Other => "file", } } } impl fmt::Display for ObjectKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if f.alternate() { f.write_str(self.human_name()) } else { f.write_str(self.name()) } } } impl FromStr for ObjectKind { type Err = UnknownObjectKindError; fn from_str(string: &str) -> Result<ObjectKind, UnknownObjectKindError> { Ok(match string { "none" => ObjectKind::None, "rel" => ObjectKind::Relocatable, "exe" => ObjectKind::Executable, "lib" => ObjectKind::Library, "dump" => ObjectKind::Dump, "dbg" => ObjectKind::Debug, "src" => ObjectKind::Sources, "other" => ObjectKind::Other, _ => return Err(UnknownObjectKindError), }) } } /// An error returned for unknown or invalid [`FileFormats`](enum.FileFormat.html). #[derive(Debug)] pub struct UnknownFileFormatError; impl fmt::Display for UnknownFileFormatError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "unknown file format") } } impl std::error::Error for UnknownFileFormatError {} /// Represents the physical object file format. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)] pub enum FileFormat { /// An unknown file format. Unknown, /// Breakpad ASCII symbol. Breakpad, /// Executable and Linkable Format, used on Linux. Elf, /// Mach Objects, used on macOS and iOS derivatives. MachO, /// Program Database, the debug companion format on Windows. Pdb, /// Portable Executable, an extension of COFF used on Windows. Pe, /// Source code bundle ZIP. SourceBundle, /// WASM container. Wasm, /// Portable PDB PortablePdb, } impl FileFormat { /// Returns the name of the file format. pub fn name(self) -> &'static str { match self { FileFormat::Unknown => "unknown", FileFormat::Breakpad => "breakpad", FileFormat::Elf => "elf", FileFormat::MachO => "macho", FileFormat::Pdb => "pdb", FileFormat::Pe => "pe", FileFormat::SourceBundle => "sourcebundle", FileFormat::Wasm => "wasm", FileFormat::PortablePdb => "portablepdb", } } } impl fmt::Display for FileFormat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.name()) } } impl FromStr for FileFormat { type Err = UnknownFileFormatError; fn from_str(string: &str) -> Result<FileFormat, UnknownFileFormatError> { Ok(match string { "breakpad" => FileFormat::Breakpad, "elf" => FileFormat::Elf, "macho" => FileFormat::MachO, "pdb" => FileFormat::Pdb, "pe" => FileFormat::Pe, "sourcebundle" => FileFormat::SourceBundle, "wasm" => FileFormat::Wasm, "portablepdb" => FileFormat::PortablePdb, _ => return Err(UnknownFileFormatError), }) } } /// A symbol from a symbol table. #[derive(Clone, Default, Eq, PartialEq)] pub struct Symbol<'data> { /// The name of the symbol. /// /// This name is generally mangled. It can be demangled by constructing a `Name` instance and /// calling demangle on it. Certain object files might only store demangled symbol names. pub name: Option<Cow<'data, str>>, /// The relative address of this symbol. pub address: u64, /// The size of this symbol, if known. 
    ///
    /// When loading symbols from an object file, the size will generally not be known. Instead,
    /// construct a [`SymbolMap`] from the object, which also fills in sizes.
    ///
    /// [`SymbolMap`]: struct.SymbolMap.html
    pub size: u64,
}

impl<'data> Symbol<'data> {
    /// Returns the name of this symbol as string.
    pub fn name(&self) -> Option<&str> {
        self.name.as_ref().map(Cow::as_ref)
    }

    /// Determines whether the given address is covered by this symbol.
    ///
    /// If the symbol size has not been computed, the address is assumed to be covered if it is
    /// greater than the symbol address. Otherwise, the address must be in the half-open interval
    /// `[address, address + size)`.
    pub fn contains(&self, address: u64) -> bool {
        address >= self.address && (self.size == 0 || address < self.address + self.size)
    }
}

impl<'d> fmt::Debug for Symbol<'d> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Symbol")
            .field("name", &self.name().unwrap_or("<unknown>"))
            .field("address", &format_args!("{:#x}", self.address))
            .field("size", &format_args!("{:#x}", self.size))
            .finish()
    }
}

/// IntoIterator type for [`SymbolMap`](struct.SymbolMap.html).
pub type SymbolMapIter<'data> = std::vec::IntoIter<Symbol<'data>>;

/// A sorted list of symbols, suitable for quick lookups.
///
/// This type can either be computed from a list or iterator of symbols, or preferably directly
/// by calling [`ObjectLike::symbol_map`] on any object. Symbols in the symbol map are guaranteed to
/// have a `size` set, except for the last symbol, which is computed by taking the offset to the
/// subsequent symbol.
///
/// `SymbolMap` also exposes a read-only view on the sorted slice of symbols. It can be converted to
/// and from lists of symbols.
///
/// ## Example
///
/// ```rust
/// # use symbolic_debuginfo::{Symbol, SymbolMap};
/// let map = SymbolMap::from(vec![
///     Symbol { name: Some("A".into()), address: 0x4400, size: 0 },
///     Symbol { name: Some("B".into()), address: 0x4200, size: 0 },
///     Symbol { name: Some("C".into()), address: 0x4000, size: 0 },
/// ]);
///
/// assert_eq!(map[0], Symbol {
///     name: Some("C".into()),
///     address: 0x4000,
///     size: 0x200,
/// });
/// ```
///
/// [`ObjectLike::symbol_map`]: trait.ObjectLike.html#tymethod.symbol_map
#[derive(Clone, Debug, Default)]
pub struct SymbolMap<'data> {
    symbols: Vec<Symbol<'data>>,
}

impl<'data> SymbolMap<'data> {
    /// Creates a new, empty symbol map.
    pub fn new() -> Self {
        SymbolMap {
            symbols: Vec::new(),
        }
    }

    /// Looks up the symbol covering the given address.
    pub fn lookup(&self, address: u64) -> Option<&Symbol<'data>> {
        match self.symbols.binary_search_by_key(&address, Self::key) {
            Ok(index) => Some(&self.symbols[index]),
            Err(0) => None,
            Err(next_index) => {
                let symbol = &self.symbols[next_index - 1];
                if symbol.contains(address) {
                    Some(symbol)
                } else {
                    None
                }
            }
        }
    }

    /// Looks up a symbol by its start address.
    pub fn lookup_exact(&self, address: u64) -> Option<&Symbol<'data>> {
        let idx = self
            .symbols
            .binary_search_by_key(&address, Self::key)
            .ok()?;
        self.symbols.get(idx)
    }

    /// Looks up a symbol covering an entire range.
    ///
    /// This is similar to [`lookup`], but it only returns the symbol result if it _also_ covers the
    /// inclusive end address of the range.
/// /// [`lookup`]: struct.SymbolMap.html#method.lookup pub fn lookup_range<R>(&self, range: R) -> Option<&Symbol<'data>> where R: RangeBounds<u64>, { let start = match range.start_bound() { Bound::Included(start) => *start, Bound::Excluded(start) => *start + 1, Bound::Unbounded => 0, }; let symbol = self.lookup(start)?; let end = match range.end_bound() { Bound::Included(end) => *end, Bound::Excluded(end) => *end - 1, Bound::Unbounded => u64::max_value(), }; if end <= start || symbol.contains(end) { Some(symbol) } else { None } } /// Returns the lookup key for a symbol, which is the symbol's address. #[inline(always)] fn key(symbol: &Symbol<'data>) -> u64 { symbol.address } } impl<'d> Deref for SymbolMap<'d> { type Target = [Symbol<'d>]; fn deref(&self) -> &Self::Target { &self.symbols } } impl<'data> IntoIterator for SymbolMap<'data> { type Item = Symbol<'data>; type IntoIter = SymbolMapIter<'data>; fn into_iter(self) -> Self::IntoIter { self.symbols.into_iter() } } impl<'data, 'a> IntoIterator for &'a SymbolMap<'data> { type Item = &'a Symbol<'data>; type IntoIter = std::slice::Iter<'a, Symbol<'data>>; fn into_iter(self) -> Self::IntoIter { self.symbols.iter() } } impl<'d> AsRef<[Symbol<'d>]> for SymbolMap<'d> { fn as_ref(&self) -> &[Symbol<'d>] { &self.symbols } } impl<'d> From<Vec<Symbol<'d>>> for SymbolMap<'d> { fn from(mut symbols: Vec<Symbol<'d>>) -> Self { if !symbols.is_empty() { // NB: This might require stable sorting to ensure determinism if multiple symbols point // at the same location. However, this only seems to happen for equivalent variants of // the same function. // // An example would be destructors where D2 (base object destructor) and D1 (complete // object destructor) might share the same code. Since those always demangle to the same // name, we do not care which function to keep in this case. // // Inlined functions will generally not appear in this list, unless they _also_ have an // explicit function body, in which case they will have a unique address, again. dmsort::sort_by_key(&mut symbols, Self::key); // Compute sizes of consecutive symbols if the size has not been provided by the symbol // iterator. In the same go, drop all but the first symbols at any given address. We do // not rely on the size of symbols in this case, since the ranges might still be // overlapping. symbols.dedup_by(|next, symbol| { if symbol.size == 0 { symbol.size = next.address - symbol.address; } symbol.address == next.address }) } SymbolMap { symbols } } } impl<'d> FromIterator<Symbol<'d>> for SymbolMap<'d> { fn from_iter<I>(iter: I) -> Self where I: IntoIterator<Item = Symbol<'d>>, { Vec::from_iter(iter).into() } } /// File information referred by [`LineInfo`](struct.LineInfo.html) comprising a directory and name. /// /// The file path is usually relative to a compilation directory. It might contain parent directory /// segments (`../`). #[derive(Clone, Default, Eq, PartialEq)] pub struct FileInfo<'data> { /// The file's basename. name: Cow<'data, [u8]>, /// Path to the file. dir: Cow<'data, [u8]>, } impl<'data> FileInfo<'data> { /// Creates a `FileInfo` with a given directory and the file name. #[cfg(feature = "dwarf")] pub fn new(dir: Cow<'data, [u8]>, name: Cow<'data, [u8]>) -> Self { FileInfo { name, dir } } /// Creates a `FileInfo` from a joined path by trying to split it. 
#[cfg(any(feature = "breakpad", feature = "ms", feature = "sourcebundle"))] pub fn from_path(path: &'data [u8]) -> Self { let (dir, name) = symbolic_common::split_path_bytes(path); FileInfo { name: Cow::Borrowed(name), dir: match dir { Some(dir) => Cow::Borrowed(dir), None => Cow::default(), }, } } /// Creates a `FileInfo` from a joined path by trying to split it. /// Unlike from_path(), copies the given data instead of referencing it. #[cfg(feature = "ppdb")] pub(crate) fn from_path_owned(path: &[u8]) -> Self { let (dir, name) = symbolic_common::split_path_bytes(path); FileInfo { name: Cow::Owned(name.to_vec()), dir: match dir { Some(dir) => Cow::Owned(dir.to_vec()), None => Cow::default(), }, } } /// Creates a `FileInfo` with the file name. pub fn from_filename(name: &'data [u8]) -> Self { FileInfo { name: Cow::Borrowed(name), dir: Cow::default(), } } /// The file name as UTF-8 string. pub fn name_str(&self) -> Cow<'data, str> { from_utf8_cow_lossy(&self.name) } /// Path to the file relative to the compilation directory. pub fn dir_str(&self) -> Cow<'data, str> { from_utf8_cow_lossy(&self.dir) } /// The full path to the file, relative to the compilation directory. pub fn path_str(&self) -> String { let joined = join_path(&self.dir_str(), &self.name_str()); clean_path(&joined).into_owned() } } #[allow(clippy::ptr_arg)] // false positive https://github.com/rust-lang/rust-clippy/issues/9218 pub(crate) fn from_utf8_cow_lossy<'data>(input: &Cow<'data, [u8]>) -> Cow<'data, str> { // See https://github.com/rust-lang/rust/issues/32669 match input { Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), Cow::Owned(bytes) => match String::from_utf8_lossy(bytes) { Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes.to_vec()) }.into(), Cow::Owned(s) => s.into(), }, } } impl fmt::Debug for FileInfo<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("FileInfo") .field("name", &self.name_str()) .field("dir", &self.dir_str()) .finish() } } /// File information comprising a compilation directory, relative path and name. pub struct FileEntry<'data> { /// Path to the compilation directory. File paths are relative to this. compilation_dir: Cow<'data, [u8]>, /// File name and path. pub info: FileInfo<'data>, } impl<'data> FileEntry<'data> { /// Path to the compilation directory. pub fn new(compilation_dir: Cow<'data, [u8]>, info: FileInfo<'data>) -> Self { FileEntry { compilation_dir, info, } } /// Path to the compilation directory. pub fn compilation_dir_str(&self) -> Cow<'data, str> { from_utf8_cow_lossy(&self.compilation_dir) } /// Absolute path to the file, including the compilation directory. pub fn abs_path_str(&self) -> String { let joined_path = join_path(&self.dir_str(), &self.name_str()); let joined = join_path(&self.compilation_dir_str(), &joined_path); clean_path(&joined).into_owned() } } impl fmt::Debug for FileEntry<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("FileInfo") .field("compilation_dir", &self.compilation_dir_str()) .field("name", &self.name_str()) .field("dir", &self.dir_str()) .finish() } } impl<'data> Deref for FileEntry<'data> { type Target = FileInfo<'data>; fn deref(&self) -> &Self::Target { &self.info } } /// File and line number mapping for an instruction address. #[derive(Clone, Eq, PartialEq)] pub struct LineInfo<'data> { /// The instruction address relative to the image base (load address). pub address: u64, /// Total code size covered by this line record. 
pub size: Option<u64>, /// File name and path. pub file: FileInfo<'data>, /// Absolute line number starting at 1. Zero means no line number. pub line: u64, } #[cfg(test)] impl LineInfo<'static> { pub(crate) fn new(address: u64, size: u64, file: &[u8], line: u64) -> LineInfo { LineInfo { address, size: Some(size), file: FileInfo::from_filename(file), line, } } } impl fmt::Debug for LineInfo<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut s = f.debug_struct("LineInfo"); s.field("address", &format_args!("{:#x}", self.address)); match self.size { Some(size) => s.field("size", &format_args!("{size:#x}")), None => s.field("size", &self.size), }; s.field("file", &self.file) .field("line", &self.line) .finish() } } /// Debug information for a function. #[derive(Clone)] pub struct Function<'data> { /// Relative instruction address of the start of the function. pub address: u64, /// Total code size covered by the function body, including inlined functions. pub size: u64, /// The name and language of the function symbol. pub name: Name<'data>, /// Path to the compilation directory. File paths are relative to this. pub compilation_dir: &'data [u8], /// Lines covered by this function, including inlined children. pub lines: Vec<LineInfo<'data>>, /// Functions that have been inlined into this function's body. pub inlinees: Vec<Function<'data>>, /// Specifies whether this function is inlined. pub inline: bool, } impl Function<'_> { /// End address of the entire function body, including inlined functions. /// /// This address points at the first instruction after the function body. pub fn end_address(&self) -> u64 { self.address.saturating_add(self.size) } } impl fmt::Debug for Function<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Function") .field("address", &format_args!("{:#x}", self.address)) .field("size", &format_args!("{:#x}", self.size)) .field("name", &self.name) .field( "compilation_dir", &String::from_utf8_lossy(self.compilation_dir), ) .field("lines", &self.lines) .field("inlinees", &self.inlinees) .field("inline", &self.inline) .finish() } } /// A dynamically dispatched iterator over items with the given lifetime. pub type DynIterator<'a, T> = Box<dyn Iterator<Item = T> + 'a>; /// A stateful session for interfacing with debug information. /// /// Debug sessions can be obtained via [`ObjectLike::debug_session`]. Since computing a session may /// be a costly operation, try to reuse the session as much as possible. /// /// ## Implementing DebugSession /// /// Reading debug information from object files usually requires loading multiple sections into /// memory and computing maps for quick random access to certain information. Since this can be a /// quite costly process, this is encapsulated into a `DebugSession`. The session may hold whatever /// data and caches may be necessary for efficiently interfacing with the debug info. /// /// All trait methods on a `DebugSession` receive `&mut self`, to allow mutation of internal cache /// structures. Lifetimes of returned types are tied to this session's lifetime, which allows to /// borrow data from the session. /// /// Examples for things to compute when building a debug session are: /// /// - Decompress debug information if it is stored with compression. /// - Build a symbol map for random access to public symbols. /// - Map string tables and other lookup tables. /// - Read headers of compilation units (compilands) to resolve cross-unit references. 
/// /// [`ObjectLike::debug_session`]: trait.ObjectLike.html#tymethod.debug_session pub trait DebugSession<'session> { /// The error returned when reading debug information fails. type Error; /// An iterator over all functions in this debug file. type FunctionIterator: Iterator<Item = Result<Function<'session>, Self::Error>>; /// An iterator over all source files referenced by this debug file. type FileIterator: Iterator<Item = Result<FileEntry<'session>, Self::Error>>; /// Returns an iterator over all functions in this debug file. /// /// Functions are iterated in the order they are declared in their compilation units. The /// functions yielded by this iterator include all inlinees and line records resolved. /// /// Note that the iterator holds a mutable borrow on the debug session, which allows it to use /// caches and optimize resources while resolving function and line information. fn functions(&'session self) -> Self::FunctionIterator; /// Returns an iterator over all source files referenced by this debug file. fn files(&'session self) -> Self::FileIterator; /// Looks up a file's source by its full canonicalized path. /// /// Returns a descriptor that has all the information available of the source. It can /// either contain the source contents directly, if it was embedded, or a source link. fn source_by_path(&self, path: &str) -> Result<Option<SourceFileDescriptor<'_>>, Self::Error>; } /// An object containing debug information. pub trait ObjectLike<'data, 'object> { /// Errors thrown when reading information from this object. type Error; /// A session that allows optimized access to debugging information. type Session: for<'session> DebugSession<'session, Error = Self::Error>; /// The iterator over the symbols in the public symbol table. type SymbolIterator: Iterator<Item = Symbol<'data>>; /// The container format of this file. fn file_format(&self) -> FileFormat; /// The code identifier of this object. /// /// The identifier can be `None` if it cannot be determined from the object file, for instance, /// because the identifier was stripped in the build process. fn code_id(&self) -> Option<CodeId>; /// The debug information identifier of this object. fn debug_id(&self) -> DebugId; /// The CPU architecture of this object. fn arch(&self) -> Arch; /// The kind of this object. fn kind(&self) -> ObjectKind; /// The address at which the image prefers to be loaded into memory. fn load_address(&self) -> u64; /// Determines whether this object exposes a public symbol table. fn has_symbols(&self) -> bool; /// Returns an iterator over symbols in the public symbol table. fn symbols(&'object self) -> Self::SymbolIterator; /// Returns an ordered map of symbols in the symbol table. fn symbol_map(&self) -> SymbolMap<'data>; /// Determines whether this object contains debug information. fn has_debug_info(&self) -> bool; /// Constructs a debugging session. /// /// A debugging session loads certain information from the object file and creates caches for /// efficient access to various records in the debug information. Since this can be quite a /// costly process, try to reuse the debugging session as long as possible. /// /// Constructing this session will also work if the object does not contain debugging /// information, in which case the session will be a no-op. This can be checked via /// [`has_debug_info`](trait.ObjectLike.html#tymethod.has_debug_info). fn debug_session(&'object self) -> Result<Self::Session, Self::Error>; /// Determines whether this object contains stack unwinding information. 
fn has_unwind_info(&self) -> bool; /// Determines whether this object contains embedded sources. fn has_sources(&self) -> bool; /// Determines whether this object is malformed and was only partially parsed fn is_malformed(&self) -> bool; } mod derive_serde { /// Helper macro to implement string based serialization and deserialization. /// /// If a type implements `FromStr` and `Display` then this automatically /// implements a serializer/deserializer for that type that dispatches /// appropriately. macro_rules! impl_str_serde { ($type:ty) => { impl ::serde::ser::Serialize for $type { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: ::serde::ser::Serializer, { serializer.serialize_str(self.name()) } } impl<'de> ::serde::de::Deserialize<'de> for $type { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: ::serde::de::Deserializer<'de>, { <::std::borrow::Cow<'_, str>>::deserialize(deserializer)? .parse() .map_err(::serde::de::Error::custom) } } }; } impl_str_serde!(super::ObjectKind); impl_str_serde!(super::FileFormat); } #[cfg(test)] mod tests { use super::*; use similar_asserts::assert_eq; fn file_info<'a>(dir: &'a str, name: &'a str) -> FileInfo<'a> { FileInfo::new( Cow::Borrowed(dir.as_bytes()), Cow::Borrowed(name.as_bytes()), ) } fn file_entry<'a>(compilation_dir: &'a str, dir: &'a str, name: &'a str) -> FileEntry<'a> { FileEntry::new( Cow::Borrowed(compilation_dir.as_bytes()), file_info(dir, name), ) } #[test] fn test_file_info() { assert_eq!(file_info("", "foo.h").path_str(), "foo.h"); assert_eq!( file_info("C:\\Windows", "foo.h").path_str(), "C:\\Windows\\foo.h" ); assert_eq!( file_info("/usr/local", "foo.h").path_str(), "/usr/local/foo.h" ); assert_eq!(file_info("/usr/local", "../foo.h").path_str(), "/usr/foo.h"); assert_eq!(file_info("/usr/local", "/foo.h").path_str(), "/foo.h"); } #[test] fn test_file_entry() { assert_eq!(file_entry("", "", "foo.h").abs_path_str(), "foo.h"); assert_eq!( file_entry("C:\\Windows", "src", "foo.h").abs_path_str(), "C:\\Windows\\src\\foo.h" ); assert_eq!( file_entry("/usr", "local", "foo.h").abs_path_str(), "/usr/local/foo.h" ); assert_eq!( file_entry("/usr/local", "..", "foo.h").abs_path_str(), "/usr/foo.h" ); assert_eq!( file_entry("/usr", "/src", "foo.h").abs_path_str(), "/src/foo.h" ); } }
true
921b7fc5d351977c5fd95c73e72e1b44056dab48
Rust
vain0x/procon
/rust/src/procon/graph/strong_component_decomposition.rs
UTF-8
2,459
2.984375
3
[ "CC0-1.0" ]
permissive
// Verified: http://judge.u-aizu.ac.jp/onlinejudge/review.jsp?rid=3429517#1 use std; pub struct StrongComponentDecomposition<'a> { /// Number of vertices. n: usize, /// The graph. g: &'a [Vec<usize>], /// Dual of the graph. h: Vec<Vec<usize>>, /// gray[u] = true if the vertex u is visited. gray: Vec<bool>, /// Topological order index for each vertex. top_ord: Vec<usize>, /// List of vertices in topological order. top_seq: Vec<usize>, /// Next index of topological order. next_ord: usize, /// Output of the process. components: Vec<Vec<usize>>, } impl<'a> StrongComponentDecomposition<'a> { pub fn run(g: &'a [Vec<usize>]) -> Vec<Vec<usize>> { let n = g.len(); let mut h = vec![vec![]; n]; for u in 0..n { for &v in &g[u] { h[v].push(u); } } let it = StrongComponentDecomposition { n: n, g: g, h: h, gray: vec![], top_ord: vec![n; n], top_seq: vec![], next_ord: 0, components: vec![], }; it.start() } fn start(mut self) -> Vec<Vec<usize>> { self.gray = vec![false; self.n]; for u in 0..self.n { self.top(u); } self.gray = vec![false; self.n]; let mut bot_seq = std::mem::replace(&mut self.top_seq, vec![]); bot_seq.reverse(); for u in bot_seq { if self.gray[u] { continue; } // Here u is the last vertex in topological order. let mut component = vec![]; self.bot(&mut component, u); self.components.push(component); } self.components } /// Topological sort. fn top(&mut self, u: usize) { if self.gray[u] { return; } self.gray[u] = true; for i in 0..self.g[u].len() { let v = self.g[u][i]; self.top(v); } self.top_seq.push(u); self.top_ord[u] = self.next_ord; self.next_ord += 1; } /// DFS over the dual. fn bot(&mut self, c: &mut Vec<usize>, u: usize) { if self.gray[u] { return; } self.gray[u] = true; c.push(u); for i in 0..self.h[u].len() { let v = self.h[u][i]; self.bot(c, v); } } }
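// Editor's sketch (not part of the original module): a small usage example.
// The graph below has two cycles, {0, 1, 2} and {3, 4}; sorting each
// component and the component list keeps the assertion independent of the
// order in which the two passes emit vertices.
#[cfg(test)]
mod example {
    use super::StrongComponentDecomposition;

    #[test]
    fn finds_two_components() {
        // Edges: 0 -> 1, 1 -> 2, 2 -> 0, 2 -> 3, 3 -> 4, 4 -> 3
        let g = vec![vec![1], vec![2], vec![0, 3], vec![4], vec![3]];
        let mut components = StrongComponentDecomposition::run(&g);
        for component in components.iter_mut() {
            component.sort();
        }
        components.sort();
        assert_eq!(components, vec![vec![0, 1, 2], vec![3, 4]]);
    }
}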
true
16df30d81180ed9b83f35dd8c80cec60ff5485e1
Rust
virtualgraham/wasm-graph-app
/tests/graph/iterator/recursive_test.rs
UTF-8
4,282
2.59375
3
[]
no_license
use gizmo_graph_db::graph::iterator::fixed::{Fixed}; use gizmo_graph_db::graph::iterator::and::{And}; use gizmo_graph_db::graph::iterator::save::{tag}; use gizmo_graph_db::graph::iterator::recursive::{Recursive}; use gizmo_graph_db::graph::iterator::{Shape, Morphism}; use gizmo_graph_db::graph::refs::{pre_fetched, Namer}; use gizmo_graph_db::graph::value::{Value}; use gizmo_graph_db::graph::linksto::{LinksTo}; use gizmo_graph_db::graph::hasa::{HasA}; use gizmo_graph_db::graph::graphmock::{Store}; use gizmo_graph_db::graph::quad::{Quad, QuadStore, Direction}; use std::rc::Rc; use std::cell::RefCell; use std::collections::HashMap; struct SingleHop { qs: Rc<RefCell<dyn QuadStore>>, pred: String } impl Morphism for SingleHop { fn morph(&self, shape: Rc<RefCell<dyn Shape>>) -> Rc<RefCell<dyn Shape>> { let fixed = Fixed::new(vec![]); fixed.borrow_mut().add(pre_fetched(Value::from(self.pred.clone()))); let pred_lto = LinksTo::new(self.qs.clone(), fixed, Direction::Predicate); let lto = LinksTo::new(self.qs.clone(), shape.clone(), Direction::Subject); let and = And::new(vec![]); and.borrow_mut().add_sub_iterator(lto); and.borrow_mut().add_sub_iterator(pred_lto); return HasA::new(self.qs.clone(), and, Direction::Object) } } fn rec_test_qs() -> Store { Store { data: vec![ Quad::new("alice", "parent", "bob", ""), Quad::new("bob", "parent", "charlie", ""), Quad::new("charlie", "parent", "dani", ""), Quad::new("charlie", "parent", "bob", ""), Quad::new("dani", "parent", "emily", ""), Quad::new("fred", "follows", "alice", ""), Quad::new("greg", "follows", "alice", ""), ].into_iter().collect() } } #[test] fn test_recursive_next() { let qs = Rc::new(RefCell::new(rec_test_qs())); let start = Fixed::new(vec![]); start.borrow_mut().add(pre_fetched(Value::from("alice"))); let r = Recursive::new(start, Rc::new(SingleHop {qs: qs.clone(), pred: "parent".to_string()}), 0).borrow().iterate(); let mut expected = vec!["bob", "charlie", "dani", "emily"]; let mut got = Vec::new(); while r.borrow_mut().next() { got.push(r.borrow().result().unwrap().key().unwrap().to_string()); } expected.sort(); got.sort(); assert_eq!(expected, got); } #[test] fn test_recursive_contains() { let qs = Rc::new(RefCell::new(rec_test_qs())); let start = Fixed::new(vec![]); start.borrow_mut().add(pre_fetched(Value::from("alice"))); let r = Recursive::new(start, Rc::new(SingleHop {qs: qs.clone(), pred: "parent".to_string()}), 0).borrow().lookup(); let values = vec!["charlie", "bob", "not"]; let expected = vec![true, true, false]; for i in 0..values.len() { let v = values[i]; let value = qs.borrow().value_of(&Value::from(v)); let ok = value.is_some() && r.borrow_mut().contains(value.as_ref().unwrap()); assert_eq!(expected[i], ok); } } #[test] fn test_recursive_next_path() { let qs = Rc::new(RefCell::new(rec_test_qs())); let start = qs.borrow().nodes_all_iterator(); let start = tag(&start, &"person"); let it = SingleHop {qs: qs.clone(), pred: "follows".to_string()}.morph(start); let and = And::new(vec![]); and.borrow_mut().add_sub_iterator(it); let fixed = Fixed::new(vec![]); fixed.borrow_mut().add(pre_fetched(Value::from("alice"))); and.borrow_mut().add_sub_iterator(fixed); let r = Recursive::new(and, Rc::new(SingleHop {qs: qs.clone(), pred: "parent".to_string()}), 0).borrow().iterate(); let mut expected = vec!["fred", "fred", "fred", "fred", "greg", "greg", "greg", "greg"]; let mut got = Vec::new(); while r.borrow_mut().next() { let mut res = HashMap::new(); r.borrow().tag_results(&mut res); 
got.push(res[&"person".to_string()].key().unwrap().to_string()); while r.borrow_mut().next_path() { let mut res = HashMap::new(); r.borrow().tag_results(&mut res); got.push(res[&"person".to_string()].key().unwrap().to_string()); } } expected.sort(); got.sort(); assert_eq!(expected, got); }
true
6cfd429d2d521de43364ede911e5eb6f4730fd96
Rust
doubleduck98/uni
/rust/lista1/zad4/src/main.rs
UTF-8
1,230
3.53125
4
[]
no_license
fn main() {
    println!("{}", square_area_to_circle(14.0));
}

fn square_area_to_circle(size: f64) -> f64 {
    size / 4.0 * std::f64::consts::PI
}

fn assert_close(a: f64, b: f64, epsilon: f64) {
    assert!(
        (a - b).abs() < epsilon,
        "Expected: {}, got: {}",
        b,
        a
    );
}

#[test]
fn test0() {
    assert_close(square_area_to_circle(9.0), 7.0685834705770345, 1e-8);
}
#[test]
fn test1() {
    assert_close(square_area_to_circle(20.0), 15.70796326794897, 1e-8);
}
#[test]
fn test2() {
    assert_close(square_area_to_circle(16.0), 12.5663706144, 1e-8);
}
#[test]
fn test3() {
    assert_close(square_area_to_circle(69.0), 54.1924732744, 1e-8);
}
#[test]
fn test4() {
    assert_close(square_area_to_circle(71.0), 55.7632696012, 1e-8);
}
#[test]
fn test5() {
    assert_close(square_area_to_circle(420.0), 329.867228627, 1e-8);
}
#[test]
fn test6() {
    assert_close(square_area_to_circle(133.0), 104.457955732, 1e-8);
}
#[test]
fn test7() {
    assert_close(square_area_to_circle(152.0), 119.380520836, 1e-8);
}
#[test]
fn test8() {
    assert_close(square_area_to_circle(2115.0), 1661.11711559, 1e-8);
}
#[test]
fn test9() {
    assert_close(square_area_to_circle(412412.0), 323907.62736306, 1e-5);
}
true
2b6bdf8c7b798180744c0aebc4013c7c0390a532
Rust
IThawk/rust-project
/rust-data-struct/src/Algorithms/dynamic_programming/edit_distance.rs
UTF-8
4,874
3.6875
4
[ "Apache-2.0" ]
permissive
//! Compute the edit distance between two strings

use std::cmp::min;

/// edit_distance(str_a, str_b) returns the edit distance between the two
/// strings. This edit distance is defined as being 1 point per insertion,
/// substitution, or deletion which must be made to make the strings equal.
///
/// This function iterates over the bytes in the string, so it may not behave
/// entirely as expected for non-ASCII strings.
///
/// # Complexity
///
/// - time complexity: O(nm),
/// - space complexity: O(nm),
///
/// where n and m are lengths of `str_a` and `str_b`
pub fn edit_distance(str_a: &str, str_b: &str) -> u32 {
    // distances[i][j] = distance between a[..i] and b[..j]
    let mut distances = vec![vec![0; str_b.len() + 1]; str_a.len() + 1];
    // Initialize cases in which one string is empty
    for j in 0..=str_b.len() {
        distances[0][j] = j as u32;
    }
    for (i, item) in distances.iter_mut().enumerate() {
        item[0] = i as u32;
    }

    for i in 1..=str_a.len() {
        for j in 1..=str_b.len() {
            distances[i][j] = min(distances[i - 1][j] + 1, distances[i][j - 1] + 1);
            if str_a.as_bytes()[i - 1] == str_b.as_bytes()[j - 1] {
                distances[i][j] = min(distances[i][j], distances[i - 1][j - 1]);
            } else {
                distances[i][j] = min(distances[i][j], distances[i - 1][j - 1] + 1);
            }
        }
    }

    distances[str_a.len()][str_b.len()]
}

/// The space-efficient version of the above algorithm.
///
/// Instead of storing the `m * n` matrix explicitly, only one row (of length `n`) is stored.
/// It keeps overwriting itself based on its previous values with the help of two scalars,
/// gradually reaching the last row. Then, the score is `matrix[n]`.
///
/// # Complexity
///
/// - time complexity: O(nm),
/// - space complexity: O(n),
///
/// where n and m are lengths of `str_a` and `str_b`
pub fn edit_distance_se(str_a: &str, str_b: &str) -> u32 {
    let (str_a, str_b) = (str_a.as_bytes(), str_b.as_bytes());
    let (m, n) = (str_a.len(), str_b.len());
    let mut distances: Vec<u32> = vec![0; n + 1]; // the dynamic programming matrix (only 1 row stored)
    let mut s: u32; // distances[i - 1][j - 1] or distances[i - 1][j]
    let mut c: u32; // distances[i][j - 1] or distances[i][j]
    let mut char_a: u8; // str_a[i - 1] the i-th character in str_a; only needs to be computed once per row
    let mut char_b: u8; // str_b[j - 1] the j-th character in str_b

    // 0th row
    for (j, v) in distances.iter_mut().enumerate().take(n + 1).skip(1) {
        *v = j as u32;
    }
    // rows 1 to m
    for i in 1..=m {
        s = (i - 1) as u32;
        c = i as u32;
        char_a = str_a[i - 1];
        for j in 1..=n {
            // c is distances[i][j-1] and s is distances[i-1][j-1] at the beginning of each round of iteration
            char_b = str_b[j - 1];
            c = min(
                s + if char_a == char_b { 0 } else { 1 },
                min(c + 1, distances[j] + 1),
            );
            // c is updated to distances[i][j], and will thus become distances[i][j-1] for the next cell
            s = distances[j]; // here distances[j] means distances[i-1][j] because it has not been overwritten yet
            // s is updated to distances[i-1][j], and will thus become distances[i-1][j-1] for the next cell
            distances[j] = c; // now distances[j] is updated to distances[i][j], and will thus become distances[i-1][j] for the next ROW
        }
    }

    distances[n]
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn equal_strings() {
        assert_eq!(0, edit_distance("Hello, world!", "Hello, world!"));
        assert_eq!(0, edit_distance_se("Hello, world!", "Hello, world!"));
        assert_eq!(0, edit_distance("Test_Case_#1", "Test_Case_#1"));
        assert_eq!(0, edit_distance_se("Test_Case_#1", "Test_Case_#1"));
    }

    #[test]
    fn one_edit_difference() {
        assert_eq!(1, edit_distance("Hello, world!", "Hell, world!"));
        assert_eq!(1, edit_distance("Test_Case_#1", "Test_Case_#2"));
        assert_eq!(1, edit_distance("Test_Case_#1", "Test_Case_#10"));
        assert_eq!(1, edit_distance_se("Hello, world!", "Hell, world!"));
        assert_eq!(1, edit_distance_se("Test_Case_#1", "Test_Case_#2"));
        assert_eq!(1, edit_distance_se("Test_Case_#1", "Test_Case_#10"));
    }

    #[test]
    fn several_differences() {
        assert_eq!(2, edit_distance("My Cat", "My Case"));
        assert_eq!(7, edit_distance("Hello, world!", "Goodbye, world!"));
        assert_eq!(6, edit_distance("Test_Case_#3", "Case #3"));
        assert_eq!(2, edit_distance_se("My Cat", "My Case"));
        assert_eq!(7, edit_distance_se("Hello, world!", "Goodbye, world!"));
        assert_eq!(6, edit_distance_se("Test_Case_#3", "Case #3"));
    }
}
true
d5f608ff9a28f1debc0404e5034c869eef71e850
Rust
necauqua/quantum-loops
/src/states/main_menu.rs
UTF-8
2,993
2.53125
3
[ "MIT" ]
permissive
use noise::{NoiseFn, Perlin}; use crate::{ engine::{self, event::Event, ui::Button, *}, states::{ level_select::LevelMenuState, main_game::draw_background, options::OptionsState, scores::ScoresState, tutorial::TutorialState, }, QuantumLoops, }; use nalgebra::Vector2; #[derive(Debug)] pub struct Background { noise: Perlin, offset: f64, } impl Background { pub fn new() -> Self { Self { noise: Perlin::new(), offset: 0.0, } } pub fn on_update(&mut self, context: &Context<QuantumLoops>) { let nx = (self.noise.get([0.0, self.offset]) * 2.0 - 1.0) * 50.0; let ny = (self.noise.get([self.offset, 0.0]) * 2.0 - 1.0) * 50.0; draw_background(&context, [nx, ny].into()); self.offset += context.delta_time() / 5.0; } } #[derive(Debug)] pub struct MainMenuState { background: Background, play: Button, scores: Button, options: Button, exit: Button, } impl MainMenuState { pub fn new() -> Self { Self { background: Background::new(), play: Button::new("Play".into()), scores: Button::new("Scores".into()), options: Button::new("Options".into()), exit: Button::new("Exit".into()), } } } impl GameState<QuantumLoops> for MainMenuState { fn on_pushed(&mut self, context: &mut Context<QuantumLoops>) -> StateTransition<QuantumLoops> { context.sound_context_mut().sound_mask = context.storage().get_enabled_sounds(); StateTransition::None } fn on_event( &mut self, event: Event, context: &mut Context<QuantumLoops>, ) -> StateTransition<QuantumLoops> { if self.play.on_event(&event, context) { return StateTransition::Set(if context.storage().passed_tutorial { Box::new(LevelMenuState::new()) } else { Box::new(TutorialState::new()) }); } else if self.scores.on_event(&event, context) { return StateTransition::push(ScoresState::new()); } else if self.options.on_event(&event, context) { return StateTransition::set(OptionsState::new()); } else if self.exit.on_event(&event, context) { engine::window().history().unwrap().back().unwrap(); } StateTransition::None } fn on_update(&mut self, context: &mut Context<QuantumLoops>) -> StateTransition<QuantumLoops> { let center = context.surface().size() / 2.0; let offset: Vector2<f64> = [0.0, context.rem_to_px(2.5)].into(); context.game.sounds.background.play_unique(); self.background.on_update(context); self.play.on_update(context, center - offset * 2.0); self.scores.on_update(context, center - offset); self.options.on_update(context, center); self.exit.on_update(context, center + offset); StateTransition::None } }
true
d0da11ea7004503c67e34af9ab891eeb19ccd539
Rust
typed-io/cryptoxide
/src/sha1.rs
UTF-8
1,679
3.21875
3
[ "Apache-2.0", "MIT" ]
permissive
//! An implementation of the SHA-1 cryptographic hash algorithm.
//!
//! It is, however, discouraged to use this algorithm as is in any application, as it is
//! not considered secure anymore. The algorithm has been deprecated since 2011, and
//! chosen-prefix attacks are practical.
//!
//! However, the hash function is still pervasively used in other contexts where security is still
//! acceptable (e.g. HMAC-SHA1), so it is made available here on that basis.
//!
//! # Example
//!
//! ```
//! use cryptoxide::{sha1::Sha1, digest::Digest};
//!
//! let mut digest = [0u8; 20];
//! let mut context = Sha1::new();
//! context.input(b"hello world");
//! context.result(&mut digest);
//! ```

use crate::digest::Digest;
use crate::hashing::sha1;

/// Structure representing the state of a Sha1 computation
#[derive(Clone)]
pub struct Sha1 {
    ctx: sha1::Context,
    computed: bool,
}

impl Sha1 {
    /// Construct a `Sha1` object
    pub const fn new() -> Sha1 {
        Sha1 {
            ctx: sha1::Sha1::new(),
            computed: false,
        }
    }
}

impl Digest for Sha1 {
    fn reset(&mut self) {
        self.ctx.reset();
        self.computed = false;
    }

    fn input(&mut self, msg: &[u8]) {
        assert!(!self.computed, "context is already finalized, needs reset");
        self.ctx.update_mut(msg);
    }

    fn result(&mut self, slice: &mut [u8]) {
        assert!(!self.computed, "context is already finalized, needs reset");
        self.computed = true;
        slice.copy_from_slice(&self.ctx.finalize_reset());
    }

    fn output_bits(&self) -> usize {
        sha1::Sha1::OUTPUT_BITS
    }

    fn block_size(&self) -> usize {
        sha1::Sha1::BLOCK_BYTES
    }
}
true
0bb75eb0a955ef4f8c166399ede42b7d07890a6c
Rust
iobtl/raytrace-rs
/src/instances.rs
UTF-8
5,907
3.078125
3
[]
no_license
use crate::{ aabb::AABB, hittable::{HitModel, HitRecord, Hittable}, ray::Ray, utility::{degrees_to_radians, INFINITY}, vec3::Vec3, }; #[derive(Clone)] pub struct Translate<'a> { hit_model: Box<HitModel<'a>>, offset: Vec3, } impl<'a> Translate<'a> { pub fn new(hit_model: HitModel<'a>, offset: Vec3) -> Self { Translate { hit_model: Box::new(hit_model), offset } } } impl Hittable for Translate<'_> { fn hit(&self, r: &Ray, tmin: f32, tmax: f32) -> Option<HitRecord> { let origin = *r.origin(); let direction = *r.direction(); let time = r.time(); // Note: we subtract from the ray origin in this case, instead of modifying // the object coordinates let moved_r = Ray::new(origin - self.offset, direction, time); if let Some(mut hit_rec) = self.hit_model.hit(&moved_r, tmin, tmax) { hit_rec.p += self.offset; // add offset again? let front_face = HitRecord::face_normal(&moved_r, &hit_rec.normal); if front_face { Some(hit_rec) } else { hit_rec.normal *= -1.0; Some(hit_rec) } } else { None } } fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> { if let Some(bbox) = self.hit_model.bounding_box(t0, t1) { Some(AABB::new(*bbox.min() + self.offset, *bbox.max() + self.offset)) } else { None } } } #[derive(Clone)] pub struct RotateY<'a> { hit_model: Box<HitModel<'a>>, sin_theta: f32, cos_theta: f32, has_box: bool, bbox: Option<AABB>, } impl<'a> RotateY<'a> { pub fn new(hit_model: HitModel<'a>, angle: f32) -> Self { let radians = degrees_to_radians(angle); let sin_theta = radians.sin(); let cos_theta = radians.cos(); let bbox = hit_model.bounding_box(0.0, 1.0); let has_box = bbox.is_some(); let mut min: [f32; 3] = [INFINITY; 3]; let mut max: [f32; 3] = [INFINITY, -INFINITY, -INFINITY]; if has_box { let bbox = bbox.unwrap(); for i in 0..2 { for j in 0..2 { for k in 0..2 { let i = i as f32; let j = j as f32; let k = k as f32; let x = i * bbox.max().x() + (1.0 - i) * bbox.min().x(); let y = j * bbox.max().y() + (1.0 - j) * bbox.min().y(); let z = k * bbox.max().z() + (1.0 - k) * bbox.min().z(); let new_x = cos_theta * x + sin_theta * z; let new_z = -sin_theta * x + cos_theta * z; let tester: [f32; 3] = [new_x, y, new_z]; for c in 0..3 { min[c] = min[c].min(tester[c]); max[c] = max[c].max(tester[c]); } } } } let min = Vec3::new(min[0], min[1], min[2]); let max = Vec3::new(max[0], max[1], max[2]); let bbox = Some(AABB::new(min, max)); RotateY { hit_model: Box::new(hit_model), sin_theta, cos_theta, has_box, bbox } } else { RotateY { hit_model: Box::new(hit_model), sin_theta, cos_theta, has_box, bbox } } } } impl<'a> Hittable for RotateY<'a> { fn hit(&self, r: &Ray, tmin: f32, tmax: f32) -> Option<HitRecord> { let origin = *r.origin(); let direction = *r.direction(); let cos_theta = self.cos_theta; let sin_theta = self.sin_theta; // Changing ray origin let new_origin_x = cos_theta * origin.x() - sin_theta * origin.z(); let new_origin_z = sin_theta * origin.x() + cos_theta * origin.z(); let new_dir_x = cos_theta * direction.x() - sin_theta * direction.z(); let new_dir_z = sin_theta * direction.x() + cos_theta * direction.z(); let rotated_r = Ray::new( Vec3::new(new_origin_x, origin.y(), new_origin_z), Vec3::new(new_dir_x, direction.y(), new_dir_z), r.time(), ); if let Some(mut hit_rec) = self.hit_model.hit(&rotated_r, tmin, tmax) { let new_p = Vec3::new( cos_theta * hit_rec.p.x() + sin_theta * hit_rec.p.z(), hit_rec.p.y(), -sin_theta * hit_rec.p.x() + cos_theta * hit_rec.p.z(), ); let new_normal = Vec3::new( cos_theta * hit_rec.normal.x() + sin_theta * hit_rec.normal.z(), hit_rec.normal.y(), -sin_theta * 
hit_rec.normal.x() + cos_theta * hit_rec.normal.z(), ); hit_rec.p = new_p; let front_face = HitRecord::face_normal(&rotated_r, &new_normal); if front_face { hit_rec.normal = new_normal; Some(hit_rec) } else { hit_rec.normal = -new_normal; Some(hit_rec) } } else { None } } fn bounding_box(&self, _: f32, _: f32) -> Option<AABB> { self.bbox } } #[derive(Clone)] pub struct FlipFace<'a> { hit_model: Box<HitModel<'a>>, } impl<'a> FlipFace<'a> { pub fn new(hit_model: HitModel<'a>) -> Self { FlipFace { hit_model: Box::new(hit_model) } } } impl Hittable for FlipFace<'_> { fn hit(&self, r: &Ray, tmin: f32, tmax: f32) -> Option<HitRecord> { // Flip light faces so normals point in -y direction. self.hit_model.hit(r, tmin, tmax).and_then(|mut rec| { rec.front_face = !rec.front_face; Some(rec) }) } fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> { self.hit_model.bounding_box(t0, t1) } }
true
fea71dbce837fb397dfa84a4602e1003934c75d4
Rust
bouzuya/rust-atcoder
/cargo-atcoder/contests/abc136/src/bin/e.rs
UTF-8
818
2.953125
3
[]
no_license
use proconio::input;

fn divisors(n: usize) -> Vec<usize> {
    let mut d = vec![];
    for i in 1.. {
        if i * i > n {
            break;
        }
        if n % i == 0 {
            d.push(i);
            if i != n / i {
                d.push(n / i);
            }
        }
    }
    d.sort();
    d
}

fn main() {
    input! {
        n: usize,
        k: usize,
        a: [usize; n],
    };
    let sum_a = a.iter().sum::<usize>();
    let ds = divisors(sum_a);
    let mut ans = 0;
    for g in ds {
        let mut r = a.iter().copied().map(|a| a % g).collect::<Vec<usize>>();
        r.sort();
        let sum_r = r.iter().sum::<usize>();
        let count = r[0..(n - sum_r / g)].iter().sum::<usize>();
        if count <= k {
            ans = ans.max(g);
        }
    }
    println!("{}", ans);
}
true
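The solution above hinges on the fact that any gcd achievable by the allowed transfers must divide the (invariant) total sum of the array, so `divisors` enumerates the only possible candidates in O(sqrt(n)). A small, hypothetical sanity check of that helper (the value 36 is chosen purely for illustration and is not part of the original file):

fn check_divisors() {
    // Every divisor of 36, returned in ascending order.
    assert_eq!(divisors(36), vec![1, 2, 3, 4, 6, 9, 12, 18, 36]);
}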
952c546042da4cd9d7d9ae94929bfe539430c90c
Rust
ccdle12/kvs-db
/course-examples/project-3/src/client.rs
UTF-8
2,241
3.1875
3
[]
no_license
use crate::common::{GetResponse, RemoveResponse, Request, SetResponse};
use crate::{KvStoreError, Result};
use serde::Deserialize;
use serde_json::de::{Deserializer, IoRead};
use std::io::{BufReader, BufWriter, Write};
use std::net::{TcpStream, ToSocketAddrs};

/// Key Value store client that reads and writes to a Key Value store server.
pub struct KvsClient {
    reader: Deserializer<IoRead<BufReader<TcpStream>>>,
    writer: BufWriter<TcpStream>,
}

impl KvsClient {
    /// Connects to a server given an address.
    pub fn connect<A: ToSocketAddrs>(addr: A) -> Result<Self> {
        let reader = TcpStream::connect(addr)?;

        // Creates reference to the same stream but handled independently.
        let writer = reader.try_clone()?;

        Ok(KvsClient {
            reader: Deserializer::from_reader(BufReader::new(reader)),
            writer: BufWriter::new(writer),
        })
    }

    /// Sets a key value pair at the server.
    pub fn set(&mut self, key: String, value: String) -> Result<()> {
        let request = Request::Set { key, value };
        serde_json::to_writer(&mut self.writer, &request)?;
        self.writer.flush()?;

        let resp = SetResponse::deserialize(&mut self.reader)?;
        match resp {
            SetResponse::Ok(_) => Ok(()),
            SetResponse::Err(s) => Err(KvStoreError::StringError(s)),
        }
    }

    /// Get a value according to a key from the server.
    pub fn get(&mut self, key: String) -> Result<Option<String>> {
        let request = Request::Get { key };
        serde_json::to_writer(&mut self.writer, &request)?;
        self.writer.flush()?;

        let res = GetResponse::deserialize(&mut self.reader)?;
        match res {
            GetResponse::Ok(r) => Ok(r),
            GetResponse::Err(_) => Err(KvStoreError::KeyNotFoundError),
        }
    }

    /// Removes a kv pair.
    pub fn remove(&mut self, key: String) -> Result<()> {
        serde_json::to_writer(&mut self.writer, &Request::Remove { key })?;
        self.writer.flush()?;

        match RemoveResponse::deserialize(&mut self.reader)? {
            RemoveResponse::Ok(r) => Ok(r),
            RemoveResponse::Err(e) => Err(KvStoreError::StringError(e)),
        }
    }
}
true
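For context, a minimal sketch of how this client might be driven from application code, assuming the crate is exposed as `kvs` and a compatible server is already listening; the address, key, and value below are illustrative only, not part of the original repository:

use kvs::{KvsClient, Result};

fn main() -> Result<()> {
    // Hypothetical server address.
    let mut client = KvsClient::connect("127.0.0.1:4000")?;

    client.set("language".to_string(), "Rust".to_string())?;
    assert_eq!(client.get("language".to_string())?, Some("Rust".to_string()));
    client.remove("language".to_string())?;
    Ok(())
}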
9d9cbeb604893f124eefb47d3ea05b3f1016908f
Rust
bastiion/json-typedef-infer
/src/hints.rs
UTF-8
6,203
3.40625
3
[ "MIT" ]
permissive
use crate::inferred_number::NumType; /// Hints for [`Inferrer`][`crate::Inferrer`]. /// /// By default, [`Inferrer`][`crate::Inferrer`] will never produce enum, values, /// or discriminator forms. Hints tell [`Inferrer`][`crate::Inferrer`] to use /// these forms. See [`HintSet`] for details on how you can specify the "paths" /// to the pieces of the input that should use these forms. /// /// `default_num_type` tells [`Inferrer`][`crate::Inferrer`] what numeric type /// to attempt to use by default when it encounters a JSON number. This default /// will be ignored if it doesn't contain the example data. When the default is /// ignored, the inferrer will infer the narrowest numerical type possible for /// input data, preferring unsigned integers over signed integers. /// /// To adapt the example used at [the crate-level docs][`crate`], here's how you /// could change [`Inferrer`][`crate::Inferrer`] behavior using hints: /// /// ``` /// use serde_json::json; /// use jtd_infer::{Inferrer, Hints, HintSet, NumType}; /// /// let enum_path = vec!["bar".to_string()]; /// let mut inferrer = Inferrer::new(Hints::new( /// NumType::Float32, /// HintSet::new(vec![&enum_path]), /// HintSet::new(vec![]), /// HintSet::new(vec![]), /// )); /// /// inferrer = inferrer.infer(json!({ "foo": true, "bar": "xxx" })); /// inferrer = inferrer.infer(json!({ "foo": false, "bar": null, "baz": 5 })); /// /// let inference = inferrer.into_schema(); /// /// assert_eq!( /// json!({ /// "properties": { /// "foo": { "type": "boolean" }, /// "bar": { "enum": ["xxx"], "nullable": true }, // now an enum /// }, /// "optionalProperties": { /// "baz": { "type": "float32" }, // instead of uint8 /// }, /// }), /// serde_json::to_value(inference.into_serde_schema()).unwrap(), /// ) /// ``` pub struct Hints<'a> { default_num_type: NumType, enums: HintSet<'a>, values: HintSet<'a>, discriminator: HintSet<'a>, } impl<'a> Hints<'a> { /// Constructs a new set of [`Hints`]. pub fn new( default_num_type: NumType, enums: HintSet<'a>, values: HintSet<'a>, discriminator: HintSet<'a>, ) -> Self { Hints { default_num_type, enums, values, discriminator, } } pub(crate) fn default_num_type(&self) -> &NumType { &self.default_num_type } pub(crate) fn sub_hints(&self, key: &str) -> Self { Self::new( self.default_num_type.clone(), self.enums.sub_hints(key), self.values.sub_hints(key), self.discriminator.sub_hints(key), ) } pub(crate) fn is_enum_active(&self) -> bool { self.enums.is_active() } pub(crate) fn is_values_active(&self) -> bool { self.values.is_active() } pub(crate) fn peek_active_discriminator(&self) -> Option<&str> { self.discriminator.peek_active() } } const WILDCARD: &'static str = "-"; /// A set of paths to parts of the input that are subject to a hint in /// [`Hints`]. pub struct HintSet<'a> { values: Vec<&'a [String]>, } impl<'a> HintSet<'a> { /// Constructs a new [`HintSet`]. /// /// Each element of `values` is a separate "path". Each element of a path is /// treated as a path "segment". So, for example, this: /// /// ``` /// use jtd_infer::HintSet; /// /// let path1 = vec!["foo".to_string(), "bar".to_string()]; /// let path2 = vec!["baz".to_string()]; /// HintSet::new(vec![&path1, &path2]); /// ``` /// /// Creates a set of paths pointing to `/foo/bar` and `/baz` in an input. /// /// The `-` path segment value is special, and acts as a wildcard, matching /// any property name. It also matches array elements, unlike ordinary path /// segments. 
pub fn new(values: Vec<&'a [String]>) -> Self { HintSet { values } } pub(crate) fn sub_hints(&self, key: &str) -> Self { Self::new( self.values .iter() .filter(|values| { let first = values.first().map(String::as_str); first == Some(WILDCARD) || first == Some(key) }) .map(|values| &values[1..]) .collect(), ) } pub(crate) fn is_active(&self) -> bool { self.values.iter().any(|values| values.is_empty()) } pub(crate) fn peek_active(&self) -> Option<&str> { self.values .iter() .find(|values| values.len() == 1) .and_then(|values| values.first().map(String::as_str)) } } #[cfg(test)] mod tests { use super::*; #[test] fn hint_set() { let path = vec!["a".to_string(), "b".to_string(), "c".to_string()]; let hint_set = HintSet::new(vec![&path]); assert!(!hint_set.is_active()); assert_eq!(None, hint_set.peek_active()); assert!(!hint_set.sub_hints("a").is_active()); assert_eq!(None, hint_set.sub_hints("a").peek_active()); assert!(!hint_set.sub_hints("a").sub_hints("b").is_active()); assert_eq!( Some("c"), hint_set.sub_hints("a").sub_hints("b").peek_active() ); assert!(hint_set .sub_hints("a") .sub_hints("b") .sub_hints("c") .is_active()); assert_eq!( None, hint_set .sub_hints("a") .sub_hints("b") .sub_hints("c") .peek_active() ); } #[test] fn hint_set_wildcard() { let path1 = vec!["a".to_string(), "b".to_string(), "c".to_string()]; let path2 = vec!["d".to_string(), "-".to_string(), "e".to_string()]; let hint_set = HintSet::new(vec![&path1, &path2]); assert!(!hint_set .sub_hints("a") .sub_hints("x") .sub_hints("c") .is_active()); assert!(hint_set .sub_hints("d") .sub_hints("x") .sub_hints("e") .is_active()); } }
true
8e4f51d65a17a8ba031877c9206d029b92fe3163
Rust
bba7474/advent-2020
/day_09/src/main.rs
UTF-8
2,113
3.171875
3
[]
no_license
use std::fs;
use std::io::{BufWriter, stdout};

use ferris_says::say;
use itertools::iproduct;

fn main() {
    let input = read_input();

    let invalid_number = find_number_not_matching_rule(input.clone());
    announce_answer(format!(
        "{} is the first number that is not a sum of two of the previous 25 numbers",
        invalid_number
    ));

    let contiguous_sum_list = get_contiguous_list_summing_to(invalid_number, input.clone());
    let min = contiguous_sum_list.iter().min().unwrap();
    let max = contiguous_sum_list.iter().max().unwrap();
    announce_answer(format!(
        "{} is the sum of the min and max of the contiguous list that sums to the invalid number {}",
        min + max,
        invalid_number
    ));
}

fn find_number_not_matching_rule(xmas_data: Vec<i64>) -> i64 {
    for i in 25..xmas_data.len() - 1 {
        let to_sum = xmas_data[i - 25..i].to_vec();
        if !has_pair_summing_to(xmas_data[i], to_sum) {
            return xmas_data[i];
        }
    }
    return 0;
}

fn has_pair_summing_to(target: i64, to_sum: Vec<i64>) -> bool {
    iproduct!(to_sum.iter().cloned(), to_sum.iter().cloned())
        .any(|(x, y)| x != y && x + y == target)
}

fn get_contiguous_list_summing_to(target: i64, xmas_data: Vec<i64>) -> Vec<i64> {
    for i in 0..xmas_data.len() - 1 {
        let mut sum = 0;
        let mut c_list = Vec::new();
        for j in i..xmas_data.len() - 1 {
            let value = xmas_data[j];
            sum += value;
            c_list.push(value);
            if sum >= target {
                break;
            }
        }
        if sum == target {
            return c_list;
        }
    }
    return Vec::new();
}

fn read_input() -> Vec<i64> {
    fs::read_to_string("input.txt")
        .expect("Error reading file")
        .lines()
        .map(|s| s.parse().expect("not an integer"))
        .collect()
}

fn announce_answer(answer: String) {
    let message = format!("{}", answer).to_string();
    let stdout = stdout();
    let width = message.chars().count();
    let mut writer = BufWriter::new(stdout.lock());
    say(message.as_bytes(), width, &mut writer).unwrap();
}
true
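A small illustrative test of the two helpers above, using numbers in the style of the puzzle's worked sample (treat the exact values as an assumption added here for illustration, not part of this repository):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn helpers_behave_as_expected_on_sample_style_data() {
        let preamble = vec![35, 20, 15, 25, 47];
        assert!(has_pair_summing_to(40, preamble.clone())); // 15 + 25
        assert!(!has_pair_summing_to(100, preamble));       // no such pair

        let data = vec![35, 20, 15, 25, 47, 40, 62, 55, 65, 95];
        assert_eq!(get_contiguous_list_summing_to(127, data), vec![15, 25, 47, 40]);
    }
}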
33fc6f32efb0ada14bcfb036415789d6fa3d8531
Rust
zachwood0s/lambda
/src/main.rs
UTF-8
1,074
2.890625
3
[]
no_license
#[macro_use]
extern crate clap;
extern crate colored;
extern crate dialoguer;

use clap::{Arg, App};

pub mod lexer;
pub mod parser;
pub mod repl;
pub mod errors;

arg_enum! {
    enum Mode {
        Repl,
        Make
    }
}

fn main() {
    // Convert mode options to lowercase
    let values = Mode::variants().iter().map(|c| c.to_lowercase()).collect::<Vec<_>>();
    let values: Vec<&str> = values.iter().map(String::as_ref).collect();

    let matches = App::new("lambda")
        .version("0.0.1")
        .author("Zach W. <zach@hayzak.com>")
        .about("Lambda Calculus Implementation")
        .arg(Arg::with_name("MODE")
            .help("What mode to run the program in")
            .index(1)
            .possible_values(&values)
            .required(true))
        .get_matches();

    match value_t!(matches.value_of("MODE"), Mode).unwrap() {
        Mode::Repl => repl::start(),
        Mode::Make => println!("chose make mode"),
    }
}
true
71dbbbce344a170be76e2afae321dcb7c61a52bb
Rust
JohnDoneth/coffee
/examples/image.rs
UTF-8
1,806
2.90625
3
[ "MIT" ]
permissive
use coffee::graphics::{ self, Color, Frame, HorizontalAlignment, VerticalAlignment, Window, WindowSettings, }; use coffee::load::Task; use coffee::ui::{ Align, Column, Element, Image, Justify, Renderer, Text, UserInterface, }; use coffee::{Game, Result, Timer}; pub fn main() -> Result<()> { <ImageScreen as UserInterface>::run(WindowSettings { title: String::from("ImageScreen - Coffee"), size: (1280, 1024), resizable: false, fullscreen: false, maximized: false, }) } struct ImageScreen { image: graphics::Image, } impl Game for ImageScreen { type Input = (); type LoadingScreen = (); fn load(_window: &Window) -> Task<ImageScreen> { graphics::Image::load("resources/ui.png") .map(|image| ImageScreen { image }) } fn draw(&mut self, frame: &mut Frame, _timer: &Timer) { frame.clear(Color { r: 0.3, g: 0.3, b: 0.6, a: 1.0, }); } } impl UserInterface for ImageScreen { type Message = (); type Renderer = Renderer; fn react(&mut self, _message: (), _window: &mut Window) {} fn layout(&mut self, window: &Window) -> Element<()> { Column::new() .width(window.width() as u32) .height(window.height() as u32) .align_items(Align::Center) .justify_content(Justify::Center) .spacing(20) .push( Text::new("This is an image") .size(50) .height(60) .horizontal_alignment(HorizontalAlignment::Center) .vertical_alignment(VerticalAlignment::Center), ) .push(Image::new(&self.image).height(250)) .into() } }
true
f29f3630b6aaf9ce039a7385e68fa31892a907d5
Rust
stormtracks/rust-examples
/nom/examples/t01.rs
UTF-8
426
3.328125
3
[ "MIT" ]
permissive
// https://iximiuz.com/en/posts/rust-writing-parsers-with-nom/
use nom::{bytes::complete::tag, IResult};

fn foo(s: &str) -> IResult<&str, &str> {
    tag("foo")(s)
}

fn main() {
    // this returns an error
    // let result = foo("rick foo bar");
    let result = foo("foo bar");
    println!("{:?}", result);
}

/*
fn main() {
    assert_eq!(foo("foo bar"), Ok((" bar", "foo")));
    assert!(foo("1234567").is_err());
}
*/
true
4b9e0e881fb46efce477055995b4a238b41fc218
Rust
senrust/algorithm
/chapter_10/priority_queue/src/main.rs
UTF-8
1,423
3.625
4
[]
no_license
fn maxheapfy(index: usize, heap: &mut Vec<i32>) {
    let mut largest_index = index;
    if index * 2 <= heap.len() - 1 {
        if heap[index] < heap[index * 2] {
            largest_index = index * 2;
        }
    }
    if index * 2 + 1 <= heap.len() - 1 {
        if heap[largest_index] < heap[index * 2 + 1] {
            largest_index = index * 2 + 1;
        }
    }
    if largest_index != index {
        heap.swap(index, largest_index);
        maxheapfy(largest_index, heap);
    }
}

// Bubble the freshly inserted key up towards the root. The original loop never
// advanced `index`, which made it spin forever after the first swap; moving the
// index up to its parent on every iteration fixes that.
fn order_insertion_key(mut index: usize, heap: &mut Vec<i32>) {
    while index > 1 && heap[index] > heap[index / 2] {
        heap.swap(index, index / 2);
        index /= 2;
    }
}

fn insert(key: i32, heap: &mut Vec<i32>) {
    heap.push(key);
    order_insertion_key(heap.len() - 1, heap);
}

fn extract(heap: &mut Vec<i32>) -> i32 {
    let heaplen = heap.len() - 1;
    heap.swap(1, heaplen);
    let value = heap.pop().unwrap();
    maxheapfy(1, heap);
    value
}

// The original file calls `input::read_numline()` from a module that lives
// elsewhere in the repository; a minimal stand-in reading whitespace-separated
// tokens from one line of stdin is assumed here so the file compiles on its own.
mod input {
    use std::io::BufRead;

    pub fn read_numline() -> Vec<String> {
        let mut line = String::new();
        std::io::stdin().lock().read_line(&mut line).unwrap();
        line.split_whitespace().map(str::to_string).collect()
    }
}

fn main() {
    let mut heap_vec: Vec<i32> = Vec::new();
    heap_vec.push(0); // index 0 is a sentinel; the heap is 1-based
    loop {
        let input_line: Vec<String> = input::read_numline();
        if input_line[0] == "insert" {
            insert(input_line[1].parse::<i32>().ok().unwrap(), &mut heap_vec);
        } else if input_line[0] == "extract" {
            let value = extract(&mut heap_vec);
            println!("value is {}", value);
        } else {
            break;
        }
    }
}
true
686c5c62ad3aa73116e21b6edc16632e5e1ce25d
Rust
aeolus3000/rust_stack
/src/utils/stack.rs
UTF-8
3,887
3.890625
4
[]
no_license
use crate::utils::pushpop::PushPop; const SIZE: usize = 4; pub struct Stack { buffer: Vec<i32>, pointer: usize } impl Stack { pub fn new() -> Stack { let buffer: Vec<i32> = Vec::new(); let pointer: usize = 0; let stack = Stack { buffer, pointer, }; stack } } impl PushPop for Stack { fn push(&mut self, value: i32) -> Result<&mut dyn PushPop, &str> { if self.is_full() { return Err("Can't push on stack because it is full") } self.buffer.push(value); self.pointer += 1; Ok(self) } fn pop(&mut self) -> Result<i32, &str> { if self.is_empty() { return Err("Can't pop from stack because it is empty") } self.pointer -= 1; Ok(self.buffer[self.pointer]) } fn size(&self) -> usize { self.pointer } fn is_empty(&self) -> bool { self.pointer == 0 } fn is_full(&self,) -> bool { self.pointer == SIZE } } #[cfg(test)] mod stack_test { use super::*; #[test] fn a_new_stack_should_be_empty() { //given: a new stack let stack = Stack::new(); assert_eq!(stack.size(), 0); } #[test] fn pushing_on_stack_should_be_visible_in_buffer() { let value = 15; //given: a new stack let mut stack = Stack::new(); assert_eq!(stack.buffer.len(), 0); let _ = stack.push(value); assert_eq!(stack.buffer[0], value); } #[test] fn pushing_on_stack_should_increment_pointer() { //given: a new stack let mut stack = Stack::new(); let _ = stack.push(15); assert_eq!(stack.pointer, 1); } #[test] fn popping_from_stack_should_return_correct_value() { //given: a new stack let mut stack = Stack::new(); let _ = stack.push(11); let _ = stack.push(12); let _ = stack.push(13); assert_eq!(stack.pop(), Ok(13)); assert_eq!(stack.pop(), Ok(12)); assert_eq!(stack.pop(), Ok(11)); } #[test] fn popping_from_stack_should_decrement_pointer() { //given: a new stack let mut stack = Stack::new(); let _ = stack.push(15); assert_eq!(stack.pointer, 1); let _ = stack.pop(); assert_eq!(stack.pointer, 0); } #[test] fn pushing_once_in_the_stack_results_in_expected_size() { //given: a new stack let mut stack = Stack::new(); let _ = stack.push(1); let _ = stack.push(2); assert_eq!(stack.size(), 2); } #[test] fn stack_is_empty_should_return_empty_state() { //given: a new stack let mut stack = Stack::new(); assert_eq!(stack.is_empty(), true); let _ = stack.push(5); assert_eq!(stack.is_empty(), false); let _ = stack.pop(); assert_eq!(stack.is_empty(), true); } #[test] fn stack_is_full_should_return_full_state() { //given: a new stack let mut stack = Stack::new(); assert_eq!(stack.is_full(), false); let _ = stack.push(5); let _ = stack.push(5); let _ = stack.push(5); let _ = stack.push(5); assert_eq!(stack.is_full(), true); let _ = stack.pop(); assert_eq!(stack.is_full(), false); } #[test] fn stack_push_should_return_error_when_full() { //given: a new stack let mut stack = Stack::new(); let _ = stack.push(5); let _ = stack.push(5); let _ = stack.push(5); let _ = stack.push(5); assert_eq!(stack.push(5).is_err(), true); } #[test] fn stack_pop_should_return_error_when_empty() { //given: a new stack let mut stack = Stack::new(); assert_eq!(stack.pop().is_err(), true); } }
true
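One behaviour the test suite above does not cover is an interleaved push after a pop: `pop` decrements the pointer but leaves the old value in the Vec, while `push` always appends, so the buffer and the pointer drift apart. A hedged sketch of a test that would expose this (it fails against the implementation as written, which is the point):

#[test]
fn pushing_after_popping_should_reuse_the_popped_slot() {
    let mut stack = Stack::new();
    let _ = stack.push(1);
    let _ = stack.pop();
    let _ = stack.push(2);
    // With the current code, buffer == [1, 2] while pointer == 1,
    // so pop() reads buffer[0] and yields Ok(1) instead of Ok(2).
    assert_eq!(stack.pop(), Ok(2));
}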
62a6546d75af965ec9d83d80ebf62c4a5f1bdad3
Rust
LukasKalbertodt/libtest-mimic
/src/lib.rs
UTF-8
17,264
3.421875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Write your own tests and benchmarks that look and behave like built-in tests! //! //! This is a simple and small test harness that mimics the original `libtest` //! (used by `cargo test`/`rustc --test`). That means: all output looks pretty //! much like `cargo test` and most CLI arguments are understood and used. With //! that plumbing work out of the way, your test runner can focus on the actual //! testing. //! //! For a small real world example, see [`examples/tidy.rs`][1]. //! //! [1]: https://github.com/LukasKalbertodt/libtest-mimic/blob/master/examples/tidy.rs //! //! # Usage //! //! To use this, you most likely want to add a manual `[[test]]` section to //! `Cargo.toml` and set `harness = false`. For example: //! //! ```toml //! [[test]] //! name = "mytest" //! path = "tests/mytest.rs" //! harness = false //! ``` //! //! And in `tests/mytest.rs` you would call [`run`] in the `main` function: //! //! ```no_run //! use libtest_mimic::{Arguments, Trial}; //! //! //! // Parse command line arguments //! let args = Arguments::from_args(); //! //! // Create a list of tests and/or benchmarks (in this case: two dummy tests). //! let tests = vec![ //! Trial::test("succeeding_test", move || Ok(())), //! Trial::test("failing_test", move || Err("Woops".into())), //! ]; //! //! // Run all tests and exit the application appropriatly. //! libtest_mimic::run(&args, tests).exit(); //! ``` //! //! Instead of returning `Ok` or `Err` directly, you want to actually perform //! your tests, of course. See [`Trial::test`] for more information on how to //! define a test. You can of course list all your tests manually. But in many //! cases it is useful to generate one test per file in a directory, for //! example. //! //! You can then run `cargo test --test mytest` to run it. To see the CLI //! arguments supported by this crate, run `cargo test --test mytest -- -h`. //! //! //! # Known limitations and differences to the official test harness //! //! `libtest-mimic` works on a best-effort basis: it tries to be as close to //! `libtest` as possible, but there are differences for a variety of reasons. //! For example, some rarely used features might not be implemented, some //! features are extremely difficult to implement, and removing minor, //! unimportant differences is just not worth the hassle. //! //! Some of the notable differences: //! //! - Output capture and `--nocapture`: simply not supported. The official //! `libtest` uses internal `std` functions to temporarily redirect output. //! `libtest-mimic` cannot use those. See [this issue][capture] for more //! information. //! - `--format=json|junit` //! //! [capture]: https://github.com/LukasKalbertodt/libtest-mimic/issues/9 #![forbid(unsafe_code)] use std::{process, sync::mpsc, fmt, time::Instant}; mod args; mod printer; use printer::Printer; use threadpool::ThreadPool; pub use crate::args::{Arguments, ColorSetting, FormatSetting}; /// A single test or benchmark. /// /// The original `libtest` often calls benchmarks "tests", which is a bit /// confusing. So in this library, it is called "trial". /// /// A trial is created via [`Trial::test`] or [`Trial::bench`]. The trial's /// `name` is printed and used for filtering. The `runner` is called when the /// test/benchmark is executed to determine its outcome. If `runner` panics, /// the trial is considered "failed". If you need the behavior of /// `#[should_panic]` you need to catch the panic yourself. You likely want to /// compare the panic payload to an expected value anyway. 
pub struct Trial { runner: Box<dyn FnOnce(bool) -> Outcome + Send>, info: TestInfo, } impl Trial { /// Creates a (non-benchmark) test with the given name and runner. /// /// The runner returning `Ok(())` is interpreted as the test passing. If the /// runner returns `Err(_)`, the test is considered failed. pub fn test<R>(name: impl Into<String>, runner: R) -> Self where R: FnOnce() -> Result<(), Failed> + Send + 'static, { Self { runner: Box::new(move |_test_mode| match runner() { Ok(()) => Outcome::Passed, Err(failed) => Outcome::Failed(failed), }), info: TestInfo { name: name.into(), kind: String::new(), is_ignored: false, is_bench: false, }, } } /// Creates a benchmark with the given name and runner. /// /// If the runner's parameter `test_mode` is `true`, the runner function /// should run all code just once, without measuring, just to make sure it /// does not panic. If the parameter is `false`, it should perform the /// actual benchmark. If `test_mode` is `true` you may return `Ok(None)`, /// but if it's `false`, you have to return a `Measurement`, or else the /// benchmark is considered a failure. /// /// `test_mode` is `true` if neither `--bench` nor `--test` are set, and /// `false` when `--bench` is set. If `--test` is set, benchmarks are not /// ran at all, and both flags cannot be set at the same time. pub fn bench<R>(name: impl Into<String>, runner: R) -> Self where R: FnOnce(bool) -> Result<Option<Measurement>, Failed> + Send + 'static, { Self { runner: Box::new(move |test_mode| match runner(test_mode) { Err(failed) => Outcome::Failed(failed), Ok(_) if test_mode => Outcome::Passed, Ok(Some(measurement)) => Outcome::Measured(measurement), Ok(None) => Outcome::Failed("bench runner returned `Ok(None)` in bench mode".into()), }), info: TestInfo { name: name.into(), kind: String::new(), is_ignored: false, is_bench: true, }, } } /// Sets the "kind" of this test/benchmark. If this string is not /// empty, it is printed in brackets before the test name (e.g. /// `test [my-kind] test_name`). (Default: *empty*) /// /// This is the only extension to the original libtest. pub fn with_kind(self, kind: impl Into<String>) -> Self { Self { info: TestInfo { kind: kind.into(), ..self.info }, ..self } } /// Sets whether or not this test is considered "ignored". (Default: `false`) /// /// With the built-in test suite, you can annotate `#[ignore]` on tests to /// not execute them by default (for example because they take a long time /// or require a special environment). If the `--ignored` flag is set, /// ignored tests are executed, too. pub fn with_ignored_flag(self, is_ignored: bool) -> Self { Self { info: TestInfo { is_ignored, ..self.info }, ..self } } /// Returns the name of this trial. pub fn name(&self) -> &str { &self.info.name } /// Returns the kind of this trial. If you have not set a kind, this is an /// empty string. pub fn kind(&self) -> &str { &self.info.kind } /// Returns whether this trial has been marked as *ignored*. pub fn has_ignored_flag(&self) -> bool { self.info.is_ignored } /// Returns `true` iff this trial is a test (as opposed to a benchmark). pub fn is_test(&self) -> bool { !self.info.is_bench } /// Returns `true` iff this trial is a benchmark (as opposed to a test). 
pub fn is_bench(&self) -> bool { self.info.is_bench } } impl fmt::Debug for Trial { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { struct OpaqueRunner; impl fmt::Debug for OpaqueRunner { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("<runner>") } } f.debug_struct("Test") .field("runner", &OpaqueRunner) .field("name", &self.info.name) .field("kind", &self.info.kind) .field("is_ignored", &self.info.is_ignored) .field("is_bench", &self.info.is_bench) .finish() } } #[derive(Debug)] struct TestInfo { name: String, kind: String, is_ignored: bool, is_bench: bool, } /// Output of a benchmark. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct Measurement { /// Average time in ns. pub avg: u64, /// Variance in ns. pub variance: u64, } /// Indicates that a test/benchmark has failed. Optionally carries a message. /// /// You usually want to use the `From` impl of this type, which allows you to /// convert any `T: fmt::Display` (e.g. `String`, `&str`, ...) into `Failed`. #[derive(Debug, Clone)] pub struct Failed { msg: Option<String>, } impl Failed { /// Creates an instance without message. pub fn without_message() -> Self { Self { msg: None } } /// Returns the message of this instance. pub fn message(&self) -> Option<&str> { self.msg.as_deref() } } impl<M: std::fmt::Display> From<M> for Failed { fn from(msg: M) -> Self { Self { msg: Some(msg.to_string()) } } } /// The outcome of performing a test/benchmark. #[derive(Debug, Clone)] enum Outcome { /// The test passed. Passed, /// The test or benchmark failed. Failed(Failed), /// The test or benchmark was ignored. Ignored, /// The benchmark was successfully run. Measured(Measurement), } /// Contains information about the entire test run. Is returned by[`run`]. /// /// This type is marked as `#[must_use]`. Usually, you just call /// [`exit()`][Conclusion::exit] on the result of `run` to exit the application /// with the correct exit code. But you can also store this value and inspect /// its data. #[derive(Clone, Debug, PartialEq, Eq)] #[must_use = "Call `exit()` or `exit_if_failed()` to set the correct return code"] pub struct Conclusion { /// Number of tests and benchmarks that were filtered out (either by the /// filter-in pattern or by `--skip` arguments). pub num_filtered_out: u64, /// Number of passed tests. pub num_passed: u64, /// Number of failed tests and benchmarks. pub num_failed: u64, /// Number of ignored tests and benchmarks. pub num_ignored: u64, /// Number of benchmarks that successfully ran. pub num_measured: u64, } impl Conclusion { /// Exits the application with an appropriate error code (0 if all tests /// have passed, 101 if there have been failures). pub fn exit(&self) -> ! { self.exit_if_failed(); process::exit(0); } /// Exits the application with error code 101 if there were any failures. /// Otherwise, returns normally. pub fn exit_if_failed(&self) { if self.has_failed() { process::exit(101) } } /// Returns whether there have been any failures. pub fn has_failed(&self) -> bool { self.num_failed > 0 } fn empty() -> Self { Self { num_filtered_out: 0, num_passed: 0, num_failed: 0, num_ignored: 0, num_measured: 0, } } } impl Arguments { /// Returns `true` if the given test should be ignored. 
fn is_ignored(&self, test: &Trial) -> bool { (test.info.is_ignored && !self.ignored && !self.include_ignored) || (test.info.is_bench && self.test) || (!test.info.is_bench && self.bench) } fn is_filtered_out(&self, test: &Trial) -> bool { let test_name = &test.info.name; // If a filter was specified, apply this if let Some(filter) = &self.filter { match self.exact { true if test_name != filter => return true, false if !test_name.contains(filter) => return true, _ => {} }; } // If any skip pattern were specified, test for all patterns. for skip_filter in &self.skip { match self.exact { true if test_name == skip_filter => return true, false if test_name.contains(skip_filter) => return true, _ => {} } } if self.ignored && !test.info.is_ignored { return true; } false } } /// Runs all given trials (tests & benchmarks). /// /// This is the central function of this crate. It provides the framework for /// the testing harness. It does all the printing and house keeping. /// /// The returned value contains a couple of useful information. See /// [`Conclusion`] for more information. If `--list` was specified, a list is /// printed and a dummy `Conclusion` is returned. pub fn run(args: &Arguments, mut tests: Vec<Trial>) -> Conclusion { let start_instant = Instant::now(); let mut conclusion = Conclusion::empty(); // Apply filtering if args.filter.is_some() || !args.skip.is_empty() || args.ignored { let len_before = tests.len() as u64; tests.retain(|test| !args.is_filtered_out(test)); conclusion.num_filtered_out = len_before - tests.len() as u64; } let tests = tests; // Create printer which is used for all output. let mut printer = printer::Printer::new(args, &tests); // If `--list` is specified, just print the list and return. if args.list { printer.print_list(&tests, args.ignored); return Conclusion::empty(); } // Print number of tests printer.print_title(tests.len() as u64); let mut failed_tests = Vec::new(); let mut handle_outcome = |outcome: Outcome, test: TestInfo, printer: &mut Printer| { printer.print_single_outcome(&outcome); // Handle outcome match outcome { Outcome::Passed => conclusion.num_passed += 1, Outcome::Failed(failed) => { failed_tests.push((test, failed.msg)); conclusion.num_failed += 1; }, Outcome::Ignored => conclusion.num_ignored += 1, Outcome::Measured(_) => conclusion.num_measured += 1, } }; // Execute all tests. let test_mode = !args.bench; if args.test_threads == Some(1) { // Run test sequentially in main thread for test in tests { // Print `test foo ...`, run the test, then print the outcome in // the same line. printer.print_test(&test.info); let outcome = if args.is_ignored(&test) { Outcome::Ignored } else { run_single(test.runner, test_mode) }; handle_outcome(outcome, test.info, &mut printer); } } else { // Run test in thread pool. let pool = match args.test_threads { Some(num_threads) => ThreadPool::new(num_threads), None => ThreadPool::default() }; let (sender, receiver) = mpsc::channel(); let num_tests = tests.len(); for test in tests { if args.is_ignored(&test) { sender.send((Outcome::Ignored, test.info)).unwrap(); } else { let sender = sender.clone(); pool.execute(move || { // It's fine to ignore the result of sending. If the // receiver has hung up, everything will wind down soon // anyway. 
let outcome = run_single(test.runner, test_mode); let _ = sender.send((outcome, test.info)); }); } } for (outcome, test_info) in receiver.iter().take(num_tests) { // In multithreaded mode, we do only print the start of the line // after the test ran, as otherwise it would lead to terribly // interleaved output. printer.print_test(&test_info); handle_outcome(outcome, test_info, &mut printer); } } // Print failures if there were any, and the final summary. if !failed_tests.is_empty() { printer.print_failures(&failed_tests); } printer.print_summary(&conclusion, start_instant.elapsed()); conclusion } /// Runs the given runner, catching any panics and treating them as a failed test. fn run_single(runner: Box<dyn FnOnce(bool) -> Outcome + Send>, test_mode: bool) -> Outcome { use std::panic::{catch_unwind, AssertUnwindSafe}; catch_unwind(AssertUnwindSafe(move || runner(test_mode))).unwrap_or_else(|e| { // The `panic` information is just an `Any` object representing the // value the panic was invoked with. For most panics (which use // `panic!` like `println!`), this is either `&str` or `String`. let payload = e.downcast_ref::<String>() .map(|s| s.as_str()) .or(e.downcast_ref::<&str>().map(|s| *s)); let msg = match payload { Some(payload) => format!("test panicked: {payload}"), None => format!("test panicked"), }; Outcome::Failed(msg.into()) }) }
true
e537653923751535087321a3cdf536c8b7a265cc
Rust
doytsujin/winrt-rs
/crates/tests/metadata/tests/writer.rs
UTF-8
2,630
2.828125
3
[ "Apache-2.0", "MIT" ]
permissive
#[test] fn writer() { let temp_file = std::env::temp_dir().join("test_metadata.winmd"); { use metadata::writer::*; let mut tables = Tables::new("test.winmd"); let mut def = TypeDef::new(TypeName::new("TestWindows.Foundation", "IStringable")); def.flags.set_public(); def.flags.set_abstract(); def.flags.set_winrt(); def.flags.set_interface(); let mut method = MethodDef::new("ToString"); method.param_list.push(Param { name: "param123".to_string(), sequence: 123, ..Default::default() }); def.method_list.push(method); tables.type_def.push(def); let mut def = TypeDef::new(TypeName::new("TestWindows.Foundation", "Rect")); def.flags.set_public(); def.flags.set_winrt(); def.extends = Some(TypeRef::system_value_type()); def.field_list.push(Field::new("Height")); tables.type_def.push(def); let mut def = TypeDef::new(TypeName::new("TestWindows.Foundation", "AsyncStatus")); def.flags.set_public(); def.flags.set_winrt(); def.extends = Some(TypeRef::system_enum()); def.field_list.push(Field { name: "Completed".to_string(), constant: Some(Value::I32(1)), ..Default::default() }); tables.type_def.push(def); file::write(temp_file.to_str().unwrap(), tables); } { use metadata::reader::*; let files = vec![File::new(temp_file.to_str().unwrap()).unwrap()]; let reader = &Reader::new(&files); let def = reader.get(TypeName::new("TestWindows.Foundation", "IStringable")).next().unwrap(); assert_eq!(reader.type_def_kind(def), TypeKind::Interface); assert!(reader.type_def_flags(def).winrt()); let method = reader.type_def_methods(def).next().unwrap(); assert_eq!(reader.method_def_name(method), "ToString"); let param = reader.method_def_params(method).next().unwrap(); assert_eq!(reader.param_name(param), "param123"); assert_eq!(reader.param_sequence(param), 123); let def = reader.get(TypeName::new("TestWindows.Foundation", "Rect")).next().unwrap(); assert_eq!(reader.type_def_kind(def), TypeKind::Struct); assert!(reader.type_def_flags(def).winrt()); let field = reader.type_def_fields(def).next().unwrap(); assert_eq!(reader.field_name(field), "Height"); let def = reader.get(TypeName::new("TestWindows.Foundation", "AsyncStatus")).next().unwrap(); assert_eq!(reader.type_def_kind(def), TypeKind::Enum); assert!(reader.type_def_flags(def).winrt()); } }
true
6f66aca6ccd6e007e925ff506f4a1930a7f1b294
Rust
luben/repro-hyper
/src/bin/broker.rs
UTF-8
5,391
2.796875
3
[]
no_license
use broker::inspect_drop; use futures::{future, sync::oneshot, Future}; use std::{ cell::RefCell, collections::hash_map::HashMap, collections::VecDeque, error::Error, rc::Rc, time::Duration, }; // the HTTP 1/2 stack use hyper::{ header::HeaderValue, service::service_fn, Body, Method, Request, Response, Server, StatusCode, }; use log::*; use simple_logger; // Run single-threaded use tokio::runtime::current_thread; use uuid::Uuid; /// Broker /// 1. worker polls on `GET /next` /// 2. client submit work to `POST /` . The work is forwarded to a worker (from 1) with request id header /// 3. worker post response to `POST /response` with the same request id, reply is forwarded to the /// client (2) type RequestId = HeaderValue; type ResponseFuture = Box<Future<Item = Response<Body>, Error = Box<Error + Send + Sync>>>; #[derive(Clone)] struct Broker { // workers waiting for a request worker_queue: Rc<RefCell<VecDeque<oneshot::Sender<(RequestId, Body)>>>>, // Map RequestId -> Response channel (oneshot) response_map: Rc<RefCell<HashMap<RequestId, oneshot::Sender<Response<Body>>>>>, } impl Broker { fn new() -> Broker { Broker { worker_queue: Rc::new(RefCell::new(VecDeque::new())), response_map: Rc::new(RefCell::new(HashMap::new())), } } // Return empty response with a status fn empty(&self, status: StatusCode) -> ResponseFuture { Box::new(inspect_drop( future::ok( Response::builder() .status(status) .body(Body::empty()) .unwrap(), ), format!("{:?}", status), )) } // worker ask for the next job fn next(&self) -> ResponseFuture { let (tx, rx) = oneshot::channel(); self.worker_queue.borrow_mut().push_back(tx); Box::new(inspect_drop( rx.and_then(|(id, body)| { debug!("Forward request id {:?}", id); future::ok( Response::builder() .header("X-Request-Id", id) .status(StatusCode::OK) .body(body) .unwrap(), ) }) .map_err(|e| e.into()), "Next()".to_string(), )) } fn request(&self, body: Body) -> ResponseFuture { let id = HeaderValue::from_str(&format!("{}", Uuid::new_v4())).unwrap(); let worker_opt = self.worker_queue.borrow_mut().pop_front(); if let Some(worker) = worker_opt { info!("Found worker {:?}", worker); if let Err((_, body)) = worker.send((id.clone(), body)) { info!("Dead worker, retry"); // worker disconnected, try next one self.request(body) } else { info!("Request sent"); let (tx, rx) = oneshot::channel(); let mut map = self.response_map.borrow_mut(); map.insert(id, tx); Box::new(inspect_drop( rx.map_err(|e| e.into()), "Invoke sent".to_string(), )) } } else { self.empty(StatusCode::NOT_ACCEPTABLE) } } fn response(&self, id: RequestId, body: Body) -> ResponseFuture { debug!("Response for {:?}", id); let mut map = self.response_map.borrow_mut(); if let Some(send) = map.remove(&id) { let resp = Response::builder().body(body).unwrap(); if let Ok(_) = send.send(resp) { debug!("Sent id {:?}", id); self.empty(StatusCode::ACCEPTED) } else { error!("Error sending id {:?}", id); self.empty(StatusCode::INTERNAL_SERVER_ERROR) } } else { error!("Response for missing id {:?}", id); self.empty(StatusCode::NOT_FOUND) } } fn dispatch(&self, req: Request<Body>) -> ResponseFuture { info!("{:?}", req); match (req.method(), req.uri().path()) { (&Method::GET, "/next") => self.next(), (&Method::POST, "/") => self.request(req.into_body()), (&Method::POST, "/response") => { if let Some(id) = req.headers().get("X-Request-Id") { self.response(id.clone(), req.into_body()) } else { self.empty(StatusCode::BAD_REQUEST) } } _ => self.empty(StatusCode::METHOD_NOT_ALLOWED), } } } fn main() { 
simple_logger::init_with_level(Level::Info).unwrap(); let addr = ([127, 0, 0, 1], 9999).into(); let broker = Box::new(Broker::new()); let exec = current_thread::TaskExecutor::current(); let server = Server::bind(&addr) .tcp_nodelay(true) .tcp_keepalive(Some(Duration::new(30, 0))) .executor(exec) .serve(move || { let broker = broker.clone(); service_fn(move |req| broker.dispatch(req)) }) .map_err(|e| error!("server error: {}", e)); info!("Listening on http://{}", addr); let mut runtime = current_thread::Runtime::new().expect("Failed to create runtime"); runtime.spawn(server); runtime.run().expect("Failed to run"); }
true
57b961a803e4aba5cd9b49302feb85977d4b41df
Rust
iphipps/rust-and-embedded-study-notes
/embedded_programming/rust_embedded/09_chapter/src/main.rs
UTF-8
1,287
2.6875
3
[ "Apache-2.0" ]
permissive
#![no_main]
#![no_std]

use aux9::{entry, tim6};

#[inline(never)]
fn delay(tim6: &tim6::RegisterBlock, ms: u16) {
    // Set the timer to go off in `ms` ticks
    // 1 tick = 1 ms
    tim6.arr.write(|w| w.arr().bits(ms));

    // CEN: Enable the counter
    tim6.cr1.modify(|_, w| w.cen().set_bit());

    // Wait until the alarm goes off (until the update event occurs)
    while !tim6.sr.read().uif().bit_is_set() {}

    // Clear the update event flag
    tim6.sr.modify(|_, w| w.uif().clear_bit());
}

#[entry]
fn main() -> ! {
    let (mut leds, rcc, tim6) = aux9::init();

    // Power on the TIM6 timer
    rcc.apb1enr.modify(|_, w| w.tim6en().set_bit());

    // OPM Select one pulse mode
    // CEN Keep the counter disabled for now
    tim6.cr1.write(|w| w.opm().set_bit().cen().clear_bit());

    // Configure the prescaler to have the counter operate at 1 KHz
    // APB1_CLOCK = 8 MHz
    // PSC = 7999
    // 8 MHz / (7999 + 1) = 1 KHz
    // The counter (CNT) will increase on every millisecond
    tim6.psc.write(|w| w.psc().bits(7_999));

    let ms = 50;
    loop {
        for curr in 0..8 {
            let next = (curr + 1) % 8;

            leds[next].on();
            delay(tim6, ms);
            leds[curr].off();
            delay(tim6, ms);
        }
    }
}
true
3ee8f5b9caa7a7e624d4c705a9227a7eae8b1baf
Rust
PaddlePaddle/VisualDL
/frontend/packages/wasm/src/high_dimensional.rs
UTF-8
2,430
2.671875
3
[ "Apache-2.0" ]
permissive
use rulinalg::matrix::{BaseMatrix, Matrix};

const THRESHOLD_DIM_NORMALIZE: usize = 50;

#[derive(Serialize, Deserialize)]
pub struct PCAResult {
    pub vectors: Vec<Vec<f64>>,
    pub variance: Vec<f64>,
}

impl PCAResult {
    pub fn new(vectors: Vec<Vec<f64>>, variance: Vec<f64>) -> Self {
        PCAResult { vectors, variance }
    }
}

fn normalize(input: &Vec<f64>, dim: usize) -> Vec<f64> {
    let len = input.len();
    let mut normalized: Vec<f64> = vec![];
    let mut centroid = vec![0f64; dim];
    let row = len / dim;
    for i in 0..row {
        let vector = &input[(i * dim)..((i + 1) * dim)];
        for j in 0..dim {
            centroid[j] += vector[j];
        }
    }
    for j in 0..dim {
        centroid[j] /= row as f64;
    }
    for i in 0..row {
        let vector = &input[(i * dim)..((i + 1) * dim)];
        let mut sub = vec![0f64; dim];
        let mut norm2 = 0f64;
        for j in 0..dim {
            sub[j] = vector[j] - centroid[j];
            norm2 += sub[j] * sub[j];
        }
        if norm2 > 0f64 {
            let norm = norm2.sqrt();
            for j in 0..dim {
                sub[j] /= norm;
            }
        }
        normalized.append(&mut sub);
    }
    normalized
}

pub fn pca(input: Vec<f64>, dim: usize, n_components: usize) -> PCAResult {
    let len = input.len();
    if len % dim != 0 {
        panic!("Input matrix size error!");
    }
    let normalized = if dim >= THRESHOLD_DIM_NORMALIZE {
        normalize(&input, dim)
    } else {
        input
    };
    let row = len / dim;
    let column = dim;
    let matrix = Matrix::new(row, column, normalized);
    let mt = &matrix.transpose();
    let scalar = mt * &matrix;
    let sigma = scalar / row as f64;
    let (s, u, _v) = sigma.svd().ok().unwrap();
    let sd = s.diag().cloned().collect::<Vec<_>>();
    let total_variance = sd.iter().sum::<f64>();
    let variance = sd.iter().map(|x| x / total_variance).collect::<Vec<_>>();
    let vectors = matrix
        .row_iter()
        .map(|vector| {
            let mut new_v = vec![0f64; n_components];
            for new_dim in 0..n_components {
                let mut dot = 0f64;
                for old_dim in 0..column {
                    dot += vector[old_dim] * u.row(old_dim)[new_dim];
                }
                new_v[new_dim] = dot;
            }
            new_v
        })
        .collect::<Vec<Vec<_>>>();
    return PCAResult::new(vectors, variance);
}
true
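A minimal sketch of exercising `pca` on a tiny row-major data set (four 2-D points flattened into a single Vec, projected down to one component); the numbers and the test name are illustrative assumptions, not part of the original crate:

#[test]
fn pca_reduces_two_dimensions_to_one() {
    // Points (1,2), (2,1), (3,4), (4,3), stored row-major.
    let data = vec![1.0, 2.0, 2.0, 1.0, 3.0, 4.0, 4.0, 3.0];
    let result = pca(data, 2, 1);
    assert_eq!(result.vectors.len(), 4);    // one projected vector per input row
    assert_eq!(result.vectors[0].len(), 1); // a single principal component each
    assert_eq!(result.variance.len(), 2);   // variance ratio per original dimension
}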
8441dc782ba11bb873d6ab5ff6a41539d1f4546e
Rust
ejmahler/strength_reduce
/tests/test_reduced_unsigned.rs
UTF-8
4,562
2.84375
3
[ "MIT", "Apache-2.0" ]
permissive
#[macro_use] extern crate proptest; extern crate strength_reduce; use proptest::test_runner::Config; use strength_reduce::{StrengthReducedU8, StrengthReducedU16, StrengthReducedU32, StrengthReducedU64, StrengthReducedUsize, StrengthReducedU128}; macro_rules! reduction_proptest { ($test_name:ident, $struct_name:ident, $primitive_type:ident) => ( mod $test_name { use super::*; use proptest::sample::select; fn assert_div_rem_equivalence(divisor: $primitive_type, numerator: $primitive_type) { let reduced_divisor = $struct_name::new(divisor); let expected_div = numerator / divisor; let expected_rem = numerator % divisor; let reduced_div = numerator / reduced_divisor; let reduced_rem = numerator % reduced_divisor; assert_eq!(expected_div, reduced_div, "Divide failed with numerator: {}, divisor: {}", numerator, divisor); assert_eq!(expected_rem, reduced_rem, "Modulo failed with numerator: {}, divisor: {}", numerator, divisor); let (reduced_combined_div, reduced_combined_rem) = $struct_name::div_rem(numerator, reduced_divisor); assert_eq!(expected_div, reduced_combined_div, "div_rem divide failed with numerator: {}, divisor: {}", numerator, divisor); assert_eq!(expected_rem, reduced_combined_rem, "div_rem modulo failed with numerator: {}, divisor: {}", numerator, divisor); } proptest! { #![proptest_config(Config::with_cases(100_000))] #[test] fn fully_generated_inputs_are_div_rem_equivalent(divisor in 1..core::$primitive_type::MAX, numerator in 0..core::$primitive_type::MAX) { assert_div_rem_equivalence(divisor, numerator); } #[test] fn generated_divisors_with_edge_case_numerators_are_div_rem_equivalent( divisor in 1..core::$primitive_type::MAX, numerator in select(vec![0 as $primitive_type, 1 as $primitive_type, core::$primitive_type::MAX - 1, core::$primitive_type::MAX])) { assert_div_rem_equivalence(divisor, numerator); } #[test] fn generated_numerators_with_edge_case_divisors_are_div_rem_equivalent( divisor in select(vec![1 as $primitive_type, 2 as $primitive_type, core::$primitive_type::MAX - 1, core::$primitive_type::MAX]), numerator in 0..core::$primitive_type::MAX) { assert_div_rem_equivalence(divisor, numerator); } } } ) } reduction_proptest!(strength_reduced_u08, StrengthReducedU8, u8); reduction_proptest!(strength_reduced_u16, StrengthReducedU16, u16); reduction_proptest!(strength_reduced_u32, StrengthReducedU32, u32); reduction_proptest!(strength_reduced_u64, StrengthReducedU64, u64); reduction_proptest!(strength_reduced_usize, StrengthReducedUsize, usize); reduction_proptest!(strength_reduced_u128, StrengthReducedU128, u128); macro_rules! 
exhaustive_test { ($test_name:ident, $struct_name:ident, $primitive_type:ident) => ( #[test] #[ignore] fn $test_name() { for divisor in 1..=std::$primitive_type::MAX { let reduced_divisor = $struct_name::new(divisor); for numerator in 0..=std::$primitive_type::MAX { let expected_div = numerator / divisor; let expected_rem = numerator % divisor; let reduced_div = numerator / reduced_divisor; assert_eq!(expected_div, reduced_div, "Divide failed with numerator: {}, divisor: {}", numerator, divisor); let reduced_rem = numerator % reduced_divisor; assert_eq!(expected_rem, reduced_rem, "Modulo failed with numerator: {}, divisor: {}", numerator, divisor); let (reduced_combined_div, reduced_combined_rem) = $struct_name::div_rem(numerator, reduced_divisor); assert_eq!(expected_div, reduced_combined_div, "div_rem divide failed with numerator: {}, divisor: {}", numerator, divisor); assert_eq!(expected_rem, reduced_combined_rem, "div_rem modulo failed with numerator: {}, divisor: {}", numerator, divisor); } } } ) } exhaustive_test!(test_strength_reduced_u08_exhaustive, StrengthReducedU8, u8); exhaustive_test!(test_strength_reduced_u16_exhaustive, StrengthReducedU16, u16);
true
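For reference, the API these property tests exercise boils down to the pattern below; a hedged sketch using only calls that appear in the tests above (the divisor 7 is chosen arbitrarily):

use strength_reduce::StrengthReducedU64;

fn main() {
    // Pay the precomputation cost once, then reuse the reduced divisor many times.
    let divisor = StrengthReducedU64::new(7);
    for numerator in 0..20u64 {
        let (q, r) = StrengthReducedU64::div_rem(numerator, divisor);
        assert_eq!(q, numerator / 7);
        assert_eq!(r, numerator % 7);
        assert_eq!(numerator / divisor, q);
        assert_eq!(numerator % divisor, r);
    }
}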
f2a558dcbfc3bd4410055dfb6a7cfa997a203cd1
Rust
visd0m/yaged
/src/types/mod.rs
UTF-8
5,799
2.84375
3
[ "MIT" ]
permissive
use std::collections::HashMap; pub type ColorMap = HashMap<usize, Rgb>; #[derive(Debug)] pub struct Gif { signature: String, screen_descriptor: ScreenDescriptor, global_color_map: Option<ColorMap>, frames: Vec<Frame>, } impl Gif { pub fn signature(&self) -> &str { &self.signature } pub fn screen_descriptor(&self) -> &ScreenDescriptor { &self.screen_descriptor } pub fn global_color_map(&self) -> &Option<ColorMap> { &self.global_color_map } pub fn frames(&self) -> &Vec<Frame> { &self.frames } pub fn new( signature: String, screen_descriptor: ScreenDescriptor, global_color_map: Option<ColorMap>, frames: Vec<Frame>, ) -> Self { Gif { signature, screen_descriptor, global_color_map, frames, } } } #[derive(Debug)] pub struct ScreenDescriptor { width: u16, height: u16, m: bool, cr: u8, pixel: u8, background: u8, } impl ScreenDescriptor { pub fn width(&self) -> u16 { self.width } pub fn height(&self) -> u16 { self.height } pub fn m(&self) -> bool { self.m } pub fn cr(&self) -> u8 { self.cr } pub fn pixel(&self) -> u8 { self.pixel } pub fn background(&self) -> u8 { self.background } pub fn new(width: u16, height: u16, m: bool, cr: u8, pixel: u8, background: u8) -> Self { ScreenDescriptor { width, height, m, cr, pixel, background, } } pub fn set_m(&mut self, m: bool) { self.m = m; } } #[derive(Debug)] pub struct Rgb { r: u8, g: u8, b: u8, } impl Rgb { pub fn r(&self) -> u8 { self.r } pub fn g(&self) -> u8 { self.g } pub fn b(&self) -> u8 { self.b } pub fn new(r: u8, g: u8, b: u8) -> Self { Rgb { r, g, b } } } #[derive(Debug)] pub struct Frame { image_descriptor: ImageDescriptor, local_color_map: Option<ColorMap>, raster_data: Vec<u8>, rgba_raster_data: Option<Vec<u8>>, graphic_control_extension: Option<GraphicControlExtension>, } impl Frame { pub fn image_descriptor(&self) -> &ImageDescriptor { &self.image_descriptor } pub fn local_color_map(&self) -> &Option<ColorMap> { &self.local_color_map } /// Normal ColorMap index color mapping. /// Color maps / graphic control extension block usage is necessary to retrieve the pixel colors in rgba. pub fn raster_data(&self) -> &Vec<u8> { &self.raster_data } /// This is not a gif89a specification field, present only if requested in the decoding process. /// Every byte of the raster data expanded to 4 bytes (R G B A). /// Color maps / graphic control extension block has already been used internally to obtain this raster data representation. 
pub fn rgba_raster_data(&self) -> &Option<Vec<u8>> { &self.rgba_raster_data } pub fn graphic_control_extension(&self) -> &Option<GraphicControlExtension> { &self.graphic_control_extension } pub fn new( image_descriptor: ImageDescriptor, local_color_map: Option<ColorMap>, raster_data: Vec<u8>, rgba_raster_data: Option<Vec<u8>>, graphic_control_extension: Option<GraphicControlExtension>, ) -> Self { Frame { image_descriptor, local_color_map, raster_data, rgba_raster_data, graphic_control_extension, } } } #[derive(Debug)] pub enum ExtensionBlock { GraphicControlExtension(GraphicControlExtension), } #[derive(Debug)] pub struct GraphicControlExtension { disposal_method: u8, user_input: bool, transparent_color: bool, delay_time: u16, transparent_color_index: Option<u8>, } impl GraphicControlExtension { pub fn disposal_method(&self) -> u8 { self.disposal_method } pub fn user_input(&self) -> bool { self.user_input } pub fn transparent_color(&self) -> bool { self.transparent_color } pub fn delay_time(&self) -> u16 { self.delay_time } pub fn transparent_color_index(&self) -> Option<u8> { self.transparent_color_index } pub fn new( disposal_method: u8, user_input: bool, transparent_color: bool, delay_time: u16, transparent_color_index: Option<u8>, ) -> Self { GraphicControlExtension { disposal_method, user_input, transparent_color, delay_time, transparent_color_index, } } } #[derive(Debug)] pub struct ImageDescriptor { image_left: u16, image_top: u16, image_width: u16, image_height: u16, m: bool, i: bool, pixel: u8, } impl ImageDescriptor { pub fn image_left(&self) -> u16 { self.image_left } pub fn image_top(&self) -> u16 { self.image_top } pub fn image_width(&self) -> u16 { self.image_width } pub fn image_height(&self) -> u16 { self.image_height } pub fn m(&self) -> bool { self.m } pub fn i(&self) -> bool { self.i } pub fn pixel(&self) -> u8 { self.pixel } pub fn new( image_left: u16, image_top: u16, image_width: u16, image_height: u16, m: bool, i: bool, pixel: u8, ) -> Self { ImageDescriptor { image_left, image_top, image_width, image_height, m, i, pixel, } } }
true
64e852030b379aa26837db023876ef0ad80b1ffc
Rust
jaffa4/siko-1
/crates/siko_ir/src/types.rs
UTF-8
16,955
2.765625
3
[ "MIT" ]
permissive
use crate::class::ClassId; use crate::data::TypeDefId; use crate::program::Program; use crate::type_var_generator::TypeVarGenerator; use crate::unifier::Unifier; use siko_util::format_list; use siko_util::Collector; use siko_util::Counter; use std::collections::BTreeMap; use std::fmt; pub struct ResolverContext { type_args: BTreeMap<usize, String>, next_index: Counter, list_type_id: TypeDefId, class_names: BTreeMap<ClassId, String>, } impl ResolverContext { pub fn new(program: &Program) -> ResolverContext { let mut class_names = BTreeMap::new(); for (name, class) in &program.class_names { class_names.insert(*class, name.clone()); } ResolverContext { type_args: BTreeMap::new(), next_index: Counter::new(), list_type_id: program.get_list_type_id(), class_names: class_names, } } pub fn add_type_arg(&mut self, arg: usize) { if !self.type_args.contains_key(&arg) { let index = self.next_index.next(); self.type_args.insert(arg, format!("t{}", index)); } } pub fn add_named_type_arg(&mut self, arg: usize, name: String) { if !self.type_args.contains_key(&arg) { self.type_args.insert(arg, name); } } pub fn get_type_arg_name(&self, arg: usize) -> String { self.type_args .get(&arg) .expect("type arg name not found") .clone() } pub fn get_list_type_id(&self) -> TypeDefId { self.list_type_id } pub fn get_class_name(&self, class_id: &ClassId) -> &String { self.class_names .get(class_id) .expect("Class name not found") } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum BaseType { Tuple, Named(TypeDefId), Function, Generic, } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum Type { Tuple(Vec<Type>), Named(String, TypeDefId, Vec<Type>), Function(Box<Type>, Box<Type>), Var(usize, Vec<ClassId>), FixedTypeArg(String, usize, Vec<ClassId>), Ref(Box<Type>), Never(usize), } impl Type { pub fn contains(&self, index: usize) -> bool { match self { Type::Tuple(items) => { for item in items { if item.contains(index) { return true; } } return false; } Type::Named(_, _, items) => { for item in items { if item.contains(index) { return true; } } return false; } Type::Function(from, to) => { if from.contains(index) { return true; } if to.contains(index) { return true; } return false; } Type::Var(i, _) => { return *i == index; } Type::FixedTypeArg(_, i, _) => { return *i == index; } Type::Ref(item) => item.contains(index), Type::Never(_) => false, } } pub fn add_constraints(&self, constraints: &Vec<ClassId>) -> Type { match self { Type::Var(index, cs) => { let mut cs = cs.clone(); cs.extend(constraints); Type::Var(*index, cs) } Type::FixedTypeArg(name, index, cs) => { let mut cs = cs.clone(); cs.extend(constraints); Type::FixedTypeArg(name.clone(), *index, cs) } _ => unreachable!(), } } pub fn get_base_type(&self) -> BaseType { match self { Type::Tuple(..) => BaseType::Tuple, Type::Named(_, id, _) => BaseType::Named(*id), Type::Function(..) => BaseType::Function, Type::Var(..) => BaseType::Generic, Type::FixedTypeArg(..) 
=> BaseType::Generic, Type::Ref(item) => item.get_base_type(), Type::Never(_) => BaseType::Generic, } } pub fn remove_fixed_types(&self) -> Type { match self { Type::Tuple(items) => { let new_items: Vec<_> = items.iter().map(|i| i.remove_fixed_types()).collect(); Type::Tuple(new_items) } Type::Named(name, id, items) => { let new_items: Vec<_> = items.iter().map(|i| i.remove_fixed_types()).collect(); Type::Named(name.clone(), *id, new_items) } Type::Function(from, to) => { let from = from.remove_fixed_types(); let to = to.remove_fixed_types(); Type::Function(Box::new(from), Box::new(to)) } Type::Var(..) => self.clone(), Type::FixedTypeArg(_, index, constraints) => Type::Var(*index, constraints.clone()), Type::Ref(item) => { let item = item.remove_fixed_types(); Type::Ref(Box::new(item)) } Type::Never(_) => self.clone(), } } pub fn duplicate( &self, arg_map: &mut BTreeMap<usize, usize>, type_var_generator: &mut TypeVarGenerator, ) -> Type { match self { Type::Tuple(items) => { let new_items: Vec<_> = items .iter() .map(|i| i.duplicate(arg_map, type_var_generator)) .collect(); Type::Tuple(new_items) } Type::Named(name, id, items) => { let new_items: Vec<_> = items .iter() .map(|i| i.duplicate(arg_map, type_var_generator)) .collect(); Type::Named(name.clone(), *id, new_items) } Type::Function(from, to) => { let from = from.duplicate(arg_map, type_var_generator); let to = to.duplicate(arg_map, type_var_generator); Type::Function(Box::new(from), Box::new(to)) } Type::Var(index, constraints) => { let mut gen = type_var_generator.clone(); let new_index = arg_map.entry(*index).or_insert_with(|| gen.get_new_index()); Type::Var(*new_index, constraints.clone()) } Type::FixedTypeArg(name, index, constraints) => { let mut gen = type_var_generator.clone(); let new_index = arg_map.entry(*index).or_insert_with(|| gen.get_new_index()); Type::FixedTypeArg(name.clone(), *new_index, constraints.clone()) } Type::Ref(item) => { let item = item.duplicate(arg_map, type_var_generator); Type::Ref(Box::new(item)) } Type::Never(index) => { let mut gen = type_var_generator.clone(); let new_index = arg_map.entry(*index).or_insert_with(|| gen.get_new_index()); Type::Never(*new_index) } } } pub fn get_arg_count(&self) -> usize { match self { Type::Tuple(..) => 0, Type::Named(..) => 0, Type::Function(_, to) => 1 + to.get_arg_count(), Type::Var(..) => 0, Type::FixedTypeArg(..) => 0, Type::Ref(..) => 0, Type::Never(_) => 0, } } pub fn get_args(&self, args: &mut Vec<Type>) { match self { Type::Tuple(..) => {} Type::Named(..) => {} Type::Function(from, to) => { args.push(*from.clone()); to.get_args(args); } Type::Var(..) => {} Type::FixedTypeArg(..) => {} Type::Ref(..) => {} Type::Never(_) => {} } } pub fn get_result_type(&self, arg_count: usize) -> Type { match self { Type::Tuple(..) => self.clone(), Type::Named(..) => self.clone(), Type::Function(_, to) => { if arg_count == 1 { *to.clone() } else { if arg_count == 0 { self.clone() } else { to.get_result_type(arg_count - 1) } } } Type::Var(..) => self.clone(), Type::FixedTypeArg(..) => self.clone(), Type::Ref(..) 
=> self.clone(), Type::Never(_) => self.clone(), } } fn collect(&self, args: &mut Collector<usize, ClassId>, context: &mut ResolverContext) { match self { Type::Tuple(items) => { for item in items { item.collect(args, context); } } Type::Named(_, _, items) => { for item in items { item.collect(args, context); } } Type::Function(from, to) => { from.collect(args, context); to.collect(args, context); } Type::Var(index, constraints) => { args.add_empty(*index); for c in constraints { args.add(*index, *c); } } Type::FixedTypeArg(name, index, constraints) => { args.add_empty(*index); context.add_named_type_arg(*index, name.clone()); for c in constraints { args.add(*index, *c); } } Type::Ref(item) => item.collect(args, context), Type::Never(_) => {} } } pub fn is_concrete_type(&self) -> bool { match self { Type::Tuple(items) => { for item in items { if !item.is_concrete_type() { return false; } } return true; } Type::Named(_, _, items) => { for item in items { if !item.is_concrete_type() { return false; } } return true; } Type::Function(from, to) => from.is_concrete_type() && to.is_concrete_type(), Type::Var(..) => false, Type::FixedTypeArg(..) => false, Type::Ref(item) => item.is_concrete_type(), Type::Never(_) => true, } } pub fn get_type_args(&self) -> Vec<Type> { match self { Type::Named(_, _, items) => items.clone(), _ => unreachable!(), } } pub fn get_typedef_id(&self) -> TypeDefId { match self { Type::Named(_, id, _) => *id, _ => unreachable!(), } } pub fn collect_type_args(&self, args: &mut Vec<usize>, program: &Program) { let mut resolver_context = ResolverContext::new(program); let mut collector = Collector::new(); self.collect(&mut collector, &mut resolver_context); for arg in collector.items { args.push(arg.0); } } pub fn get_resolved_type_string(&self, program: &Program) -> String { let mut resolver_context = ResolverContext::new(program); self.get_resolved_type_string_with_context(&mut resolver_context) } pub fn get_resolved_type_string_with_context( &self, resolver_context: &mut ResolverContext, ) -> String { let mut args = Collector::new(); self.collect(&mut args, resolver_context); for (arg, _) in &args.items { resolver_context.add_type_arg(*arg); } let mut constraint_strings = Vec::new(); for (arg, classes) in &args.items { for class_id in classes { let class_name = resolver_context.get_class_name(class_id); let c_str = format!( "{} {}", class_name, resolver_context.get_type_arg_name(*arg) ); constraint_strings.push(c_str); } } let prefix = if constraint_strings.is_empty() { format!("") } else { format!("({}) => ", format_list(&constraint_strings[..])) }; let type_str = self.as_string(false, resolver_context); format!("{}{}", prefix, type_str) } fn as_string(&self, need_parens: bool, resolver_context: &ResolverContext) -> String { match self { Type::Tuple(items) => { let ss: Vec<_> = items .iter() .map(|item| item.as_string(false, resolver_context)) .collect(); format!("({})", ss.join(", ")) } Type::Function(from, to) => { let from_str = from.as_string(true, resolver_context); let to_str = to.as_string(true, resolver_context); let func_type_str = format!("{} -> {}", from_str, to_str); if need_parens { format!("({})", func_type_str) } else { func_type_str } } Type::Var(index, _) => resolver_context.get_type_arg_name(*index), Type::FixedTypeArg(name, _, _) => format!("{}", name), Type::Named(name, id, items) => { let ss: Vec<_> = items .iter() .map(|item| item.as_string(true, resolver_context)) .collect(); if *id == resolver_context.get_list_type_id() { assert_eq!(ss.len(), 1); 
format!("[{}]", ss[0]) } else { let (args, simple) = if ss.is_empty() { (format!(""), true) } else { (format!(" {}", ss.join(" ")), false) }; if simple { format!("{}{}", name, args) } else { if need_parens { format!("({}{})", name, args) } else { format!("{}{}", name, args) } } } } Type::Ref(item) => format!("&{}", item.as_string(need_parens, resolver_context)), Type::Never(_) => format!("!"), } } pub fn apply(&mut self, unifier: &Unifier) -> bool { let new = unifier.apply(self); let changed = *self != new; *self = new; changed } } impl fmt::Display for Type { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Type::Tuple(items) => { let ss: Vec<_> = items.iter().map(|i| format!("{}", i)).collect(); write!(f, "({})", ss.join(", ")) } Type::Named(name, _, items) => { let ss: Vec<_> = items.iter().map(|i| format!("{}", i)).collect(); let args = if ss.is_empty() { "".to_string() } else { format!(" ({})", ss.join(" ")) }; write!(f, "{}{}", name, args) } Type::Function(from, to) => write!(f, "{} -> {}", from, to), Type::Var(id, constraints) => { let c = if constraints.is_empty() { format!("") } else { format!( "/{}", constraints .iter() .map(|c| format!("{}", c)) .collect::<Vec<_>>() .join(", ") ) }; write!(f, "${}{}", id, c) } Type::FixedTypeArg(_, id, constraints) => { let c = if constraints.is_empty() { format!("") } else { format!( "/{}", constraints .iter() .map(|c| format!("{}", c)) .collect::<Vec<_>>() .join(", ") ) }; write!(f, "f${}{}", id, c) } Type::Ref(item) => write!(f, "&{}", item), Type::Never(_) => write!(f, "!"), } } }
true
bdb2192cf0cd690dd4f19483faa6df6e29926dd3
Rust
calum/rust-unic
/gen/src/generate/ucd/age.rs
UTF-8
3,085
2.703125
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::char; use std::collections::BTreeMap; use std::fmt::Display; use std::fs::File; use std::io::{self, Read, Write}; use std::path::Path; use std::str::FromStr; use super::{UnicodeData, UnicodeVersion}; use generate::PREAMBLE; use generate::char_property::ToRangeBSearchMap; use regex::Regex; #[derive(Clone, Default, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] struct AgeData(BTreeMap<char, String>); impl AgeData { fn emit<P: AsRef<Path>>(&self, dir: P) -> io::Result<()> { let AgeData(ref map) = *self; let mut file = File::create(dir.as_ref().join("age_values.rsv"))?; writeln!( file, "{}\n{}", PREAMBLE, map.to_range_bsearch_map(Display::fmt) )?; Ok(()) } } impl FromStr for AgeData { type Err = (); fn from_str(str: &str) -> Result<Self, Self::Err> { lazy_static! { static ref REGEX: Regex = Regex::new( r"(?xm)^ # every line ([[:xdigit:]]{4,6}) # range start (?:..([[:xdigit:]]{4,6}))? # range end (option) [[:blank:]]*;[[:blank:]]* # separator ([[:digit:]]+) # major version \.([[:digit:]]+) # minor version (?:\.([[:digit:]]+))? # micro version (option) ", ).unwrap(); } let mut age_data = BTreeMap::default(); for capture in REGEX.captures_iter(str) { let start = u32::from_str_radix(&capture[1], 16).unwrap(); let end = capture .get(2) .map_or(start, |m| u32::from_str_radix(m.as_str(), 16).unwrap()); let major = capture[3].parse::<u16>().unwrap(); let minor = capture[4].parse::<u16>().unwrap(); let micro = capture .get(5) .map_or(0, |m| m.as_str().parse::<u16>().unwrap()); for point in start..(end + 1) { if let Some(char) = char::from_u32(point) { age_data.insert( char, format!( "Assigned(UnicodeVersion {{ major: {}, minor: {}, micro: {} }})", major, minor, micro, ), ); } } } Ok(AgeData(age_data)) } } /// Generate tables for the ucd-age crate pub fn generate<P: AsRef<Path>>( dir: P, version: &UnicodeVersion, _: &UnicodeData, ) -> io::Result<()> { println!("> unic::ucd::age::tables::unicode_version"); version.emit(&dir)?; println!(">>> Loading UCD DerivedAge"); let mut derived_age = File::open(Path::new("data/ucd/DerivedAge.txt"))?; let mut buffer = String::new(); derived_age.read_to_string(&mut buffer)?; println!("> unic::ucd::age::tables::age_values"); buffer.parse::<AgeData>().unwrap().emit(dir)?; Ok(()) }
true
b94d83fda07a8042671bd9f5ffa18859128a33e8
Rust
vvvm23/sss
/src/md.rs
UTF-8
14,463
3.609375
4
[]
no_license
use std::fs::File; use std::io::prelude::*; /// This file handles the conversion from md to a vector of enums that describes /// the various components of a markdown file. This will not be a comprehensive /// representation of a markdown file, but will be sufficient for my use case. /// /// In particular, I will begin by handlings: /// - Headings using #, both enclosed by another # or not /// - Paragraphs, a sequence of lines delimited by an empty line /// - Images /// - Hyperlinks /// - Code blocks, (indented code blocks) /// - Math Blocks (Inline and Display) /// use std::io::BufReader; /// Enum containing all supported paragraph markdown components #[derive(Debug)] pub enum PGComponent { Text(String), // Default type Bold(String), // ** ** Italics(String), // * * Hyperlink(String, String), // (text, url) Code(String), // Inline code Math(String), // Inline math } /// Enum containing all supported markdown components #[derive(Debug)] pub enum MDComponent { Heading(u8, String), Paragraph(Vec<PGComponent>), Image(String, String), CodeBlock(String), // Quote(String), // Math(String), // Display math } /// Used for defining the current block for multi-line blocks #[derive(Copy, Clone)] enum Block { Code, Paragraph, Quote, } /// Interpret Chars as a heading and return MDComponent::Heading fn parse_heading(text: &mut std::str::Chars) -> MDComponent { let mut depth: u8 = 1; while text.next() == Some('#') { depth += 1; } MDComponent::Heading(depth, text.take_while(|x| *x != '#').collect()) } /// Interpret String as a code block and return MDComponent::CodeBlock fn parse_code(text: &String) -> MDComponent { MDComponent::CodeBlock(text.to_string()) } /// Interpret String as a paragraph and return MDComponent::Paragraph fn parse_paragraph(text: &String) -> MDComponent { let mut current_block: String = "".to_string(); let mut pg_vec: Vec<PGComponent> = Vec::new(); let mut chars = text.chars(); let text_chars = chars.by_ref(); // TODO: Check for unclosed tags and other such error handling loop { let c = &text_chars.next(); match c { Some('*') => { // Some form of emphasis if current_block.len() > 0 { pg_vec.push(PGComponent::Text(current_block)); current_block = "".to_string(); } match text_chars.next() { Some('*') => { // Bold let bold: String = text_chars.take_while(|x| *x != '*').collect(); let closing = text_chars.next(); match closing { Some('*') => (), _ => panic!("No closing asterix in bold tag! (requires **)"), }; pg_vec.push(PGComponent::Bold(bold.to_string())); } Some(c) => { // Italics let italics: String = format!( "{}{}", c, text_chars.take_while(|x| *x != '*').collect::<String>() ); // bit wack pg_vec.push(PGComponent::Italics(italics)); } None => { // Something went wrong panic!("Expected paragraph stream to continue. 
It did not.."); } }; } Some('[') => { // Inline link if current_block.len() > 0 { pg_vec.push(PGComponent::Text(current_block)); current_block = "".to_string(); } let text: String = text_chars.take_while(|x| *x != ']').collect(); let url: String = text_chars .skip_while(|x| *x != '(') .skip(1) .take_while(|x| *x != ')') .collect(); pg_vec.push(PGComponent::Hyperlink(text, url)); } Some('`') => { // Inline code if current_block.len() > 0 { pg_vec.push(PGComponent::Text(current_block)); current_block = "".to_string(); } let code: String = text_chars.take_while(|x| *x != '`').collect(); pg_vec.push(PGComponent::Code(code)); } Some('\\') => { // Escape character match text_chars.next() { Some('(') => { if current_block.len() > 0 { pg_vec.push(PGComponent::Text(current_block)); current_block = "".to_string(); } let mut math_text: String = "".to_string(); let mut last_char = text_chars.next(); loop { let next_char = text_chars.next(); if let Some('\\') = last_char { if let Some(')') = next_char { break; } } math_text.push(last_char.unwrap()); last_char = next_char; } pg_vec.push(PGComponent::Math(math_text)); } Some(c) => { current_block.push('\\'); current_block.push(c); } None => { break; // TODO: Catch this case } } } Some(ch) => { // Any other character current_block.push(*ch); } None => { // Reached end of iterator break; } } } // Flush remaining block as text if current_block.len() > 0 { pg_vec.push(PGComponent::Text(current_block)); } MDComponent::Paragraph(pg_vec) } /// Interpret String as a quote block and return MDComponent::Quote pub fn parse_quote(text: &String) -> MDComponent { MDComponent::Quote(text.to_string()) } pub fn parse_math(text: &String) -> MDComponent { MDComponent::Math(text.to_string()) } /// Takes path to markdown file and returns Vec<MDComponent> representing the file pub fn parse_md_file(path: &str) -> std::io::Result<Vec<MDComponent>> { let f = File::open(path)?; let f = BufReader::new(f); let mut md_vec: Vec<MDComponent> = Vec::new(); // Initialise stream to empty vec let mut block: String = "".to_string(); // Initialise current block to empty let mut current_block: Option<Block> = None; // Set current block to None for (_, l) in f.lines().enumerate() { let line = l.unwrap().to_string(); let mut line_chars = line.chars(); let c = line_chars.next(); let md_c = match c { Some('#') => { // Found a heading let md_cc = match current_block { Some(Block::Paragraph) => Some(parse_paragraph(&block)), Some(Block::Code) => Some(parse_code(&block)), Some(Block::Quote) => Some(parse_code(&block)), None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); current_block = None; } Some(parse_heading(&mut line_chars)) } Some('>') => { // Found a block quote let md_cc = match current_block { Some(Block::Paragraph) => Some(parse_paragraph(&block)), Some(Block::Code) => Some(parse_code(&block)), Some(Block::Quote) => None, None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); } current_block = Some(Block::Quote); block.push_str(&line.chars().skip(2).collect::<String>()); block.push_str(" "); None } Some('$') => { // Potentially found a display math block match line_chars.next() { Some('$') => { // Display Math let md_cc = match current_block { Some(Block::Code) => Some(parse_code(&block)), Some(Block::Quote) => Some(parse_quote(&block)), Some(Block::Paragraph) => Some(parse_paragraph(&block)), None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); } let math_text: String = line_chars .take_while(|x| *x != 
'$') .collect(); md_vec.push(parse_math(&math_text)); } Some(c) => { let md_cc = match current_block { Some(Block::Code) => Some(parse_code(&block)), Some(Block::Quote) => Some(parse_quote(&block)), Some(Block::Paragraph) => None, None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); } current_block = Some(Block::Paragraph); block.push_str(&line); block.push(c); } None => { let md_cc = match current_block { Some(Block::Code) => Some(parse_code(&block)), Some(Block::Quote) => Some(parse_quote(&block)), Some(Block::Paragraph) => None, None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); } current_block = Some(Block::Paragraph); block.push('$'); } } None } Some(' ') => { // Potentially found a code block if line_chars.take(3).collect::<String>() == " " { let md_cc = match current_block { Some(Block::Code) => None, Some(Block::Paragraph) => Some(parse_paragraph(&block)), Some(Block::Quote) => Some(parse_quote(&block)), None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); } current_block = Some(Block::Code); block.push_str(&line.chars().skip(4).collect::<String>()); block.push_str("\n"); } else { let md_cc = match current_block { Some(Block::Code) => Some(parse_code(&block)), Some(Block::Quote) => Some(parse_quote(&block)), Some(Block::Paragraph) => None, None => None, }; if let Some(b) = md_cc { md_vec.push(b); block = "".to_string(); } current_block = Some(Block::Paragraph); block.push_str(&line); block.push_str(" "); } None } Some('!') => { // Found an image let alt_text: String = line_chars .skip_while(|x| *x != '[') .skip(1) .take_while(|x| *x != ']') .collect(); let url: String = line .chars() .skip_while(|x| *x != '(') .skip(1) .take_while(|x| *x != ')') .collect(); Some(MDComponent::Image(alt_text, url)) } None => { let md_cc = match current_block { Some(Block::Quote) => Some(parse_quote(&block)), Some(Block::Paragraph) => Some(parse_paragraph(&block)), Some(Block::Code) => Some(parse_code(&block)), None => None, }; if let Some(c) = md_cc { md_vec.push(c); } block = "".to_string(); current_block = None; None } _ => { // Found something else, interpret as paragraph block.push_str(&line); block.push_str(" "); current_block = Some(Block::Paragraph); None } }; if let Some(c) = md_c { md_vec.push(c); block = "".to_string(); } } // Add final block let md_cc = match current_block { Some(Block::Quote) => Some(parse_quote(&block)), Some(Block::Paragraph) => Some(parse_paragraph(&block)), Some(Block::Code) => Some(parse_code(&block)), None => None, }; if let Some(b) = md_cc { md_vec.push(b); } Ok(md_vec) }
true
5ec38d4aebececc0dd6a035924bb2accf7194663
Rust
colelawrence/cargo-watt
/macro-test/src/lib.rs
UTF-8
745
2.59375
3
[ "MIT" ]
permissive
use proc_macro::TokenStream; extern crate proc_macro; #[proc_macro] pub fn my_macro(_input: TokenStream) -> proc_macro::TokenStream { let stream = quote::quote! { println!("{}", 42); }; stream.into() } #[proc_macro] pub fn my_macro1(_input: TokenStream) -> proc_macro::TokenStream { let stream = quote::quote! { println!("{}", 1337); }; stream.into() } #[proc_macro_derive(DeriveMacro, attributes(test))] pub fn derive_macro(_item: TokenStream) -> TokenStream { "fn answer() -> u32 { 42 }".parse().unwrap() } #[proc_macro_attribute] pub fn attribute_macro_twice(_attr: TokenStream, input: TokenStream) -> TokenStream { let mut stream = input.clone(); stream.extend(input); stream }
true
ab5f9283ceacb81a511bdcb38c5e531aab206182
Rust
rome/tools
/crates/rome_js_analyze/src/analyzers/suspicious/no_sparse_array.rs
UTF-8
3,560
2.515625
3
[ "MIT" ]
permissive
use rome_analyze::{context::RuleContext, declare_rule, ActionCategory, Ast, Rule, RuleDiagnostic}; use rome_console::markup; use rome_diagnostics::Applicability; use rome_js_factory::make; use rome_js_syntax::{AnyJsArrayElement, AnyJsExpression, JsArrayExpression, TriviaPieceKind}; use rome_rowan::{AstNode, AstNodeExt, AstSeparatedList, BatchMutationExt}; use crate::JsRuleAction; declare_rule! { /// Disallow sparse arrays /// /// ## Examples /// /// ### Invalid /// /// ```js,expect_diagnostic /// [1,,2] /// ``` pub(crate) NoSparseArray { version: "0.7.0", name: "noSparseArray", recommended: true, } } impl Rule for NoSparseArray { type Query = Ast<JsArrayExpression>; type State = (); type Signals = Option<Self::State>; type Options = (); fn run(ctx: &RuleContext<Self>) -> Option<Self::State> { let node = ctx.query(); // We defer collect `JsHole` index until user want to apply code action. node.elements().iter().find_map(|element| { if matches!(element.ok()?, AnyJsArrayElement::JsArrayHole(_),) { Some(()) } else { None } }) } fn diagnostic(ctx: &RuleContext<Self>, _state: &Self::State) -> Option<RuleDiagnostic> { let node = ctx.query(); Some(RuleDiagnostic::new(rule_category!(), node.syntax().text_trimmed_range(), markup! { "This "<Emphasis>"array"</Emphasis>" contains an "<Emphasis>"empty slot"</Emphasis>"." } .to_owned() )) } fn action(ctx: &RuleContext<Self>, _state: &Self::State) -> Option<JsRuleAction> { let node = ctx.query(); let mut mutation = ctx.root().begin(); let mut final_array_element_list = node.elements(); for (i, item) in final_array_element_list.iter().enumerate() { if matches!(item, Ok(AnyJsArrayElement::JsArrayHole(_))) { let undefine_indent = if i == 0 { make::ident("undefined") } else { make::ident("undefined") .with_leading_trivia([(TriviaPieceKind::Whitespace, " ")]) }; let ident_expr = make::js_identifier_expression(make::js_reference_identifier(undefine_indent)); // Why we need to use `final_array_element_list.iter().nth(i)` instead of `item`, because every time we // call `replace_node` the previous iteration `item` is not the descent child of current `final_array_element_list` any more. let n_element = final_array_element_list.iter().nth(i)?.ok()?; final_array_element_list = final_array_element_list.replace_node( n_element, AnyJsArrayElement::AnyJsExpression(AnyJsExpression::JsIdentifierExpression( ident_expr, )), )?; } } mutation.replace_node( node.clone(), make::js_array_expression( node.l_brack_token().ok()?, final_array_element_list, node.r_brack_token().ok()?, ), ); Some(JsRuleAction { category: ActionCategory::QuickFix, applicability: Applicability::MaybeIncorrect, message: markup! { "Replace hole with undefined" }.to_owned(), mutation, }) } }
true
02d0c4bcc7d98b0af190fc39909f24ea0842f724
Rust
tomhoule/prisma-engine
/libs/datamodel/core/src/common/functions/traits.rs
UTF-8
945
3
3
[ "Apache-2.0" ]
permissive
use crate::ast; use crate::common::value::{MaybeExpression, ValueValidator}; use crate::error::DatamodelError; /// Trait for functions which can be accessed from the datamodel. pub trait Functional { /// Gets the name of the function. fn name(&self) -> &str; /// Applies the function to the given arguments and returns the result. fn apply(&self, values: &[ValueValidator], span: ast::Span) -> Result<MaybeExpression, DatamodelError>; /// Internal: Shorthand to check the count of arguments, and raise an error if applicable. fn check_arg_count(&self, values: &[ValueValidator], count: usize, span: ast::Span) -> Result<(), DatamodelError> { if values.len() == count { Ok(()) } else { Err(DatamodelError::new_argument_count_missmatch_error( self.name(), count, values.len(), span, )) } } }
true
013c31a5286bce22329164ff926df17300a0013f
Rust
cnruby/learn-rust-by-crates
/hello-borrowing/bin-hello/examples/expand/move_vec.rs
UTF-8
1,082
3.390625
3
[]
no_license
#![allow(unused_variables)] #[cfg(feature = "ok")] fn main() { let v: Vec<u8> = vec![1, 2, 3]; println!("v is {:p}", &v); // clone // *cloned* the variable v, rendering v usable. let z = v.clone(); println!("z is {:p}", &z); println!("v is {:p}", &v); // move / copy // *moved* the variable v, rendering v unusable. let w = v; println!("w is {:p}", &w); //println!("v is {:p}", &v); // ERROR } #[cfg(feature = "err")] fn main() { // move occurs because `v` has type `std::vec::Vec<u8>`, // which does not implement the `Copy` trait let v: Vec<u8> = vec![1, 2, 3]; println!("v is {:p}", &v); // clone let z = v.clone(); println!("z is {:p}", &z); println!("v is {:p}", &v); // move / copy // *moved* the variable v, rendering v unusable. let w = v; // value moved here println!("w is {:p}", &w); println!("v is {:p}", &v); // ERROR: value borrowed here after move } #[cfg(all(not(feature = "ok"), not(feature = "err")))] fn main() { use aide::*; hello(); }
true
3bd56dafc9be0df39907bc3103619a9bf02669c0
Rust
ErikssonM/weekend-tracer
/src/color.rs
UTF-8
1,413
3.09375
3
[]
no_license
use std::iter::Sum; use std::ops::{Add, Mul}; use rand::random; use crate::geometry::{rand_in, v3, V3}; #[derive(Clone, Copy, Debug)] pub struct Color(pub V3); impl Color { pub fn black() -> Self { Color(v3(0.0, 0.0, 0.0)) } pub fn ppm(&self) -> String { // sqrt for gamma correction let r = self.0.x.sqrt(); let g = self.0.y.sqrt(); let b = self.0.z.sqrt(); format!( "{} {} {}", (256.0 * r.clamp(0.0, 0.999)) as u32, (256.0 * g.clamp(0.0, 0.999)) as u32, (256.0 * b.clamp(0.0, 0.999)) as u32, ) } pub fn mut_const_mul(&mut self, c: f64) { self.0 *= c; } pub fn random() -> Color { Color(v3(random(), random(), random())) } pub fn random_in(min: f64, max: f64) -> Color { Color(v3(rand_in(min, max), rand_in(min, max), rand_in(min, max))) } } impl Add for Color { type Output = Color; fn add(self, rhs: Self) -> Self::Output { Color(self.0 + rhs.0) } } impl Mul for Color { type Output = Color; fn mul(self, rhs: Self) -> Self::Output { Color(v3( self.0.x * rhs.0.x, self.0.y * rhs.0.y, self.0.z * rhs.0.z, )) } } impl Sum for Color { fn sum<I: Iterator<Item = Self>>(iter: I) -> Self { iter.fold(Color::black(), |a, b| a + b) } }
true
1a1aad8236a6511ae9a4aa0ad3d8154226152faa
Rust
sam-wright/Advent-of-Code
/2018/day1/src/main.rs
UTF-8
1,441
3.3125
3
[]
no_license
use std::collections::HashMap; use std::fs::File; use std::io::{self, Read}; fn main() -> io::Result<()> { let mut file = File::open("input.txt")?; //let mut file = File::open("test_input.txt")?; let mut contents = String::new(); file.read_to_string(&mut contents)?; contents = contents.replace("+", ""); let collection: Vec<&str> = contents.split('\n').collect(); let numbers: Vec<i64> = collection[..collection.len() - 1] .iter() .map(|v| v.parse().expect("expected a number")) .collect(); // For some reason the last element of collection is empty, hence processing up to len-1 let mut temp_sum = 0; let mut frequencies = HashMap::new(); let mut looking_for_dup = true; let mut first_run = true; let mut sum = 0i64; // Part1 while looking_for_dup { sum = numbers.iter().fold(sum, |sum, &val| { temp_sum = sum + val; //println!("temp sum = {}", temp_sum); let counter = frequencies.entry(temp_sum).or_insert(0); *counter += 1; //println!("counter = {}", counter); if *counter > 1 { println!("Found dup = {}", temp_sum); looking_for_dup = false; } temp_sum }); if first_run { println!("Final sum = {}", sum); first_run = false; } } // Part2 Ok(()) }
true
c4d06054b20b6fe48e66ff087ce701f78c3a8421
Rust
wadackel/rs-td4
/td4/src/opcode.rs
UTF-8
499
3.265625
3
[ "MIT" ]
permissive
use num_derive::{FromPrimitive, ToPrimitive}; // Operation Code #[derive(Debug, Copy, Clone, FromPrimitive, ToPrimitive)] pub enum Opcode { AddA = 0b0000, // ADD A, Im AddB = 0b0101, // ADD B, Im MovA = 0b0011, // MOV A, Im MovB = 0b0111, // MOV B, Im MovAB = 0b0001, // MOV A, B MovBA = 0b0100, // MOV B, A Jmp = 0b1111, // JMP Im Jnc = 0b1110, // JNC Im InA = 0b0010, // IN A InB = 0b0110, // IN B Out = 0b1011, // OUT Im OutB = 0b1001, // OUT B }
true
0b00ae044d41fc52a922217e153c7865bd7282dd
Rust
yamash723/nes-hello-world-rust
/src/nes/cassette/header.rs
UTF-8
2,073
3.40625
3
[]
no_license
use super::CassetteInitializeError; #[derive(Debug, PartialEq)] pub struct INesHeader { /// ASCII letters 'NES' followed by 0x1A(EOF) pub magic_numbers: [u8; 4], /// Number of pages for The program rom pub prg_size: u8, /// Number of pages for The character rom pub chr_size: u8, } impl INesHeader { pub fn new(buf: &Vec<u8>) -> Result<Self, CassetteInitializeError> { // <iNES file format header> // 0-3: Constant $4E $45 $53 $1A ("NES" followed by MS-DOS end-of-file) // 4: Size of PRG ROM in 16 KB units // 5: Size of CHR ROM in 8 KB units (Value 0 means the board uses CHR RAM) // // refer: https://wiki.nesdev.com/w/index.php/INES let magic_numbers = *array_ref!(buf, 0, 4); if &magic_numbers != "NES\x1A".as_bytes() { return Err(CassetteInitializeError::FormatError) }; Ok(INesHeader { magic_numbers: magic_numbers, prg_size: buf[4], chr_size: buf[5], }) } } #[cfg(test)] mod ines_header_test { use super::*; #[test] fn new_success() { // "N" "E" "S" "\x1A" "5" "3" let rom_bytes = [78, 69, 83, 26, 53, 51]; assert_eq!(rom_bytes, *"NES\x1A53".as_bytes()); let ines_header = INesHeader::new(&rom_bytes.to_vec()).unwrap(); assert_eq!(ines_header, INesHeader { magic_numbers: [ rom_bytes[0], rom_bytes[1], rom_bytes[2], rom_bytes[3], ], prg_size: rom_bytes[4], chr_size: rom_bytes[5], }); } #[test] fn new_format_error() { // "N" "N" "S" "\x1A" "5" "3" let rom_bytes = [78, 78, 83, 26, 53, 51]; assert_eq!(rom_bytes, *"NNS\x1A53".as_bytes()); let ines_header = INesHeader::new(&rom_bytes.to_vec()); assert!( match ines_header { Err(CassetteInitializeError::FormatError) => true, _ => false, } ); } }
true
44bf53f13e2a7bfaac5108e5a4e05de11cdec05f
Rust
accierro/leetcode-solutions
/easy/matrix/toeplitz_matrix/src/main.rs
UTF-8
1,995
3.921875
4
[]
no_license
// A matrix is Toeplitz if every diagonal from top-left to bottom-right has the same element. // Now given an M x N matrix, return True if and only if the matrix is Toeplitz. // Example 1: // Input: // matrix = [ // [1,2,3,4], // [5,1,2,3], // [9,5,1,2] // ] // Output: True // Explanation: // In the above grid, the diagonals are: // "[9]", "[5, 5]", "[1, 1, 1]", "[2, 2, 2]", "[3, 3]", "[4]". // In each diagonal all elements are the same, so the answer is True. // Example 2: // Input: // matrix = [ // [1,2], // [2,2] // ] // Output: False // Explanation: // The diagonal "[1, 2]" has different elements. // Note: // matrix will be a 2D array of integers. // matrix will have a number of rows and columns in range [1, 20]. // matrix[i][j] will be integers in range [0, 99]. struct Solution; impl Solution { pub fn is_toeplitz_matrix(matrix: Vec<Vec<i32>>) -> bool { // 5 4 3 2 1 0 // 6 // 7 let rows = matrix.len() as usize; let col = matrix[0].len() as usize; let d = rows + col - 1; for i in 0..d { let mut m: usize = if i < col { 0 } else { i - col }; let mut n: usize = if i < col { i } else { 0 }; // println!("{} {} {}", i, m, n); let num = matrix[m][n]; while m < rows as usize && n < col as usize { if matrix[m][n] != num { return false; } m += 1; n += 1; } } true } } fn main() { // assert_eq!( // Solution::is_toeplitz_matrix(vec![vec![1, 2, 3, 4], vec![5, 1, 2, 3], vec![9, 5, 1, 2]]), // true // ); // assert_eq!( // Solution::is_toeplitz_matrix(vec![vec![1, 2], vec![2, 2]]), // false // ); // assert_eq!(Solution::is_toeplitz_matrix(vec![vec![65, 98, 57]]), true); assert_eq!( Solution::is_toeplitz_matrix(vec![vec![33, 72, 44, 89]]), true ); }
true
441c35256e92801dcde4365c66ff1f4750a9d7eb
Rust
AlisCode/bevy
/crates/bevy_core/src/time/time.rs
UTF-8
1,295
3
3
[ "MIT" ]
permissive
use bevy_ecs::ResMut; use bevy_utils::{Duration, Instant}; /// Tracks elapsed time since the last update and since the App has started #[derive(Debug)] pub struct Time { pub delta: Duration, pub instant: Option<Instant>, pub delta_seconds_f64: f64, pub delta_seconds: f32, pub seconds_since_startup: f64, pub startup: Instant, } impl Default for Time { fn default() -> Time { Time { delta: Duration::from_secs(0), instant: None, startup: Instant::now(), delta_seconds_f64: 0.0, seconds_since_startup: 0.0, delta_seconds: 0.0, } } } impl Time { pub fn update(&mut self) { let now = Instant::now(); if let Some(instant) = self.instant { self.delta = now - instant; self.delta_seconds_f64 = self.delta.as_secs_f64(); self.delta_seconds = self.delta.as_secs_f32(); } let duration_since_startup = now - self.startup; self.seconds_since_startup = duration_since_startup.as_secs_f64(); self.instant = Some(now); } pub fn time_since_startup(&self) -> Duration { Instant::now() - self.startup } } pub(crate) fn time_system(mut time: ResMut<Time>) { time.update(); }
true
d3db477882b00d08e38c76c90726cccca2840610
Rust
wgledbetter/learn-rust
/src/vars.rs
UTF-8
291
3.171875
3
[]
no_license
pub fn run() { // Default let v1 = "Fake"; println!("v1 = {}", v1); // Mutable let mut v2 = 0; v2 = 5; println!("v2 = {}", v2); // Const const V3: i128 = 345; println!("V3 = {}", V3); // Multiple Variables let (t1, t2) = ("First", 45.6); println!("t1 = {}, t2 = {}", t1, t2); }
true
18acc426faa2cd1188119b2e8aa779f870fad676
Rust
Daniel-B-Smith/NPR-Weekend-Puzzle
/oct_21_2018_pat_rondon/src/main.rs
UTF-8
2,440
2.96875
3
[ "MIT" ]
permissive
/* This code finds a solution to the NPR puzzle presented on Oct 21, 2018. A (paraphrased) statement of the problem: Given the letters in the word 'beermouth', contruct a three by three matrix of words where all the verticals, horizontals, and diagonals form three letter words. This solution is a (nearly line for line) translation of the solution written y Pat Rondon here: https://gist.github.com/pat-rondon/9568e2840d9af31030bf4c9d4b7ebae8 */ use std::collections::HashSet; use std::error::Error; use std::fs::File; use std::io::{BufRead, BufReader}; use std::iter::Iterator; extern crate permutohedron; use permutohedron::Heap; extern crate itertools; use itertools::any; use itertools::Itertools; fn is_word<'a, I>(letters: I, words: &HashSet<String>) -> bool where I: Iterator<Item = &'a char>, { let word: String = letters.collect(); words.contains(&word) } fn main() { let f = match File::open("dictionary.txt") { Ok(f) => f, Err(why) => panic!("Failed to open links.txt: {}", Error::description(&why)), }; let words: HashSet<String> = BufReader::new(f) .lines() .map(|s| s.unwrap()) .filter(|s| s.len() == 3) .collect(); let mut letters: Vec<char> = "beermouth".chars().collect(); let heap = Heap::new(&mut letters); for perm in heap { let rows = perm.iter().chunks(3); if any(&rows, |letters| !is_word(letters, &words)) { continue; } let mut missing = false; let mut iter_perm = perm.iter(); for _ in 0..3 { let word: String = iter_perm.clone().step_by(3).collect(); if !words.contains(&word) { missing = true; break; } iter_perm.next(); } if missing { continue; } let diagonal_top_left: String = [perm[0], perm[4], perm[8]].iter().collect(); if !words.contains(&diagonal_top_left) { continue; } let diagonal_top_right: String = [perm[2], perm[4], perm[6]].iter().collect(); if !words.contains(&diagonal_top_right) { continue; } println!("Found solution:"); for ii in 0..3 { println!( " {} {} {}", perm[3 * ii], perm[3 * ii + 1], perm[3 * ii + 2] ); } } }
true
398ebfb14fd273c1744ef9f3ce8997a21858258b
Rust
Ryman/advent-of-code
/2017/src/bin/two.rs
UTF-8
3,571
4.09375
4
[]
no_license
/* --- Day 2: Corruption Checksum --- As you walk through the door, a glowing humanoid shape yells in your direction. "You there! Your state appears to be idle. Come help us repair the corruption in this spreadsheet - if we take another millisecond, we'll have to display an hourglass cursor!" The spreadsheet consists of rows of apparently-random numbers. To make sure the recovery process is on the right track, they need you to calculate the spreadsheet's checksum. For each row, determine the difference between the largest value and the smallest value; the checksum is the sum of all of these differences. For example, given the following spreadsheet: 5 1 9 5 7 5 3 2 4 6 8 The first row's largest and smallest values are 9 and 1, and their difference is 8. The second row's largest and smallest values are 7 and 3, and their difference is 4. The third row's difference is 6. In this example, the spreadsheet's checksum would be 8 + 4 + 6 = 18. What is the checksum for the spreadsheet in your puzzle input? */ use std::io::Read; use std::fs::File; use std::cmp; fn main() { let mut input = File::open("inputs/two.txt").unwrap(); let mut s = String::new(); input.read_to_string(&mut s).unwrap(); println!("a: {}", solve_a(&s)); println!("a: {}", solve_b(&s)); } fn solve_a(input: &str) -> u32 { input.lines().fold(0, |checksum, line| { let numbers = line.split(&['\t', ' '][..]).map(|s| s.parse::<u32>().unwrap()); let mut min = std::u32::MAX; let mut max = std::u32::MIN; for number in numbers { min = cmp::min(min, number); max = cmp::max(max, number); } checksum + max - min }) } /* --- Part Two --- "Great work; looks like we're on the right track after all. Here's a star for your effort." However, the program seems a little worried. Can programs be worried? "Based on what we're seeing, it looks like all the User wanted is some information about the evenly divisible values in the spreadsheet. Unfortunately, none of us are equipped for that kind of calculation - most of us specialize in bitwise operations." It sounds like the goal is to find the only two numbers in each row where one evenly divides the other - that is, where the result of the division operation is a whole number. They would like you to find those numbers on each line, divide them, and add up each line's result. For example, given the following spreadsheet: 5 9 2 8 9 4 7 3 3 8 6 5 In the first row, the only two numbers that evenly divide are 8 and 2; the result of this division is 4. In the second row, the two numbers are 9 and 3; the result is 3. In the third row, the result is 2. In this example, the sum of the results would be 4 + 3 + 2 = 9. What is the sum of each row's result in your puzzle input? */ fn solve_b(input: &str) -> u32 { input.lines().fold(0, |checksum, line| { let numbers = line.split(&['\t', ' '][..]).map(|s| s.parse::<u32>().unwrap()).collect::<Vec<_>>(); for (i, &a) in numbers.iter().enumerate() { for &b in &numbers[i + 1..] { if a % b == 0 { return checksum + (a / b) } else if b % a == 0 { return checksum + (b / a) } } } unreachable!() }) } #[test] fn test_a() { let input = "5 1 9 5 7 5 3 2 4 6 8"; assert_eq!(solve_a(input), 18); } #[test] fn test_b() { let input = "5 9 2 8 9 4 7 3 3 8 6 5"; assert_eq!(solve_b(input), 9); }
true
fcf624a825eb4c3c1dd18f0f9cdbba1210db71dd
Rust
MissionKontrol/css_walker
/src/main.rs
UTF-8
1,797
3.046875
3
[]
no_license
use std::fs; use scraper::{Html, Selector}; use selectors::attr::CaseSensitivity; use clap::{App, Arg}; #[derive(Default)] struct RollResult { player_name: String, date: String, // die: String, // result: u8, } fn main() { // ARGs let matches = App::new("rust_test") .version("0.1.0") .author("Peter Forsythe") .arg( Arg::with_name("file") .short("f") .long("file") .takes_value(true) .help("HTML file to parse"), ) .get_matches(); // file setup let file = matches.value_of("file").unwrap_or( "/home/peter/Documents/git/data/Chat Log for The Seeds of Evil-small.html" // .unwrap_or("/home/peter/Documents/git/rust_test/data/Chat Log fragment.html"); ); // let's get to work println!("getting to work"); let contents = fs::read_to_string(file).expect("Something went wrong reading the file"); let html_doc = Html::parse_document(&contents); let div_message = Selector::parse(".message").unwrap(); let div_selector = Selector::parse("div").unwrap(); for message in html_doc.select(&div_message) { let fragment = Html::parse_fragment(&message.inner_html()); println!( "message.value().attr(general) {:?}", message .value() .has_class("general", CaseSensitivity::CaseSensitive) ); for div in fragment.select(&div_selector) { // NOTE: the original expression here was incomplete; printing the class attribute is an assumption about the intent println!("class {:?}", div.value().attr("class")); let all_selector = Selector::parse("*").unwrap(); for child in div.select(&all_selector) { println!("child {:?}", child.html()); } } std::process::exit(2) } }
true
c27d0fdc60a98234aa6808f4f268c052880de89e
Rust
drichardson/examples
/rust/rustbook/ch8/vectors/src/main.rs
UTF-8
1,185
4.21875
4
[ "Unlicense" ]
permissive
fn main() { let v: Vec<i32> = Vec::new(); println!("v={:?}", v); // using the vec! macro. Type inferred. let v = vec![1, 2, 3]; println!("v={:?}", v); let mut v = Vec::new(); v.push(5); v.push(6); v.push(7); v.push(8); println!("v={:?}", v); // Dropping a vector drops it's elements { let v = vec![1, 2, 3, 4]; println!("v={:?}", v); } // <- v goes out of scope and is freed here let v = vec![1, 2, 3, 4, 5]; let third: &i32 = &v[2]; println!("The third element is {}", third); match v.get(2) { Some(third) => println!("The third element is {}", third), None => println!("There is no third elements."), } // two ways to get items from a vector: &[] and .get // let does_not_exist = &v[100]; // will crash with 'main panicked at index is out of bounds' let does_not_exist = v.get(100); // returns None. print!("does_not_exist={:?}", does_not_exist); let v = vec![1, 2, 3, 4, 5]; let first = &v[0]; // v.push(6); // error: cannot borrow 'v' as mutable because it is also borrowed as immutable println!("The first elemetn is: {}", first); }
true
0d32435ddde3a0155929486607f1d4122c7cd502
Rust
MarkZuber/rpiled
/rledsvr/src/tasks/displaytext.rs
UTF-8
1,133
2.734375
3
[ "MIT" ]
permissive
use crate::taskmgr::TaskError; use core::jobs::{Cancellable, LoopState}; use core::TextBlock; use rpiledbind::MatrixFont; use rpiledbind::MatrixHolder; pub struct DisplayTextTask { matrix: MatrixHolder, text_blocks: Vec<TextBlock>, } impl DisplayTextTask { pub fn new(matrix: &MatrixHolder, text_blocks: Vec<TextBlock>) -> Self { Self { matrix: matrix.clone(), text_blocks: text_blocks, } } } impl Cancellable for DisplayTextTask { type Error = TaskError; fn for_each(&mut self) -> Result<LoopState, Self::Error> { let mut matrix = self.matrix.lock_matrix(); matrix.clear(); let kerning_offset: i32 = 0; for block in &self.text_blocks { let font = MatrixFont::new(&block.font_path); matrix.draw_text( &font, block.x, block.y, block.r, block.g, block.b, &block.text, kerning_offset, ); } matrix.swap_canvas(); Ok(LoopState::Continue) } }
true
2fafba7fcfebc214bc03bf97a52909a0dc6dd3cc
Rust
196Ikuchil/nes_emulator
/src/nes/rom/mod.rs
UTF-8
302
2.921875
3
[ "MIT" ]
permissive
use super::types::{Data, Addr}; #[derive(Debug)] pub struct Rom { vec: Vec<Data>, } impl Rom { pub fn new(buf: Vec<Data>) -> Rom { Rom { vec: buf.clone() } } pub fn read(&self, addr: u32) -> Data { self.vec[addr as usize] } pub fn size(&self) -> usize { self.vec.len() } }
true
d7b18859a2a4a42804e8002b025a83f980672e71
Rust
sd-EvandroLippert/data_types_01
/src/main.rs
UTF-8
2,900
3.71875
4
[]
no_license
#[allow(dead_code)] #[allow(unused_variables)] mod stack_and_heap; use std::mem; fn main() { fundamental_data_types(); operators(); scope_and_shadowing(); stack_and_heap::stack_and_heap(); } fn fundamental_data_types(){ // Data types // Integers // u = unsigned, only values greater than or equal to 0 // i = signed, negative and positive values // u8, u16, u32, u64, i8, i16, ... // Strings // char // Float // f32, f64 // All of them are signed // Integers // u = unsigned, 8 = 8 bits, unsigned variables just accept positive integers // 0 - 255 let a: u8 = 255; println!("{}", a); // Create a mutable variable // We must add mut before the variable name // i = signed; 8 = 8 bits, signed variables accept negative and positive values // -128 - 127 let mut b: i8 = -55; println!("b = {} before", b); b = 100; println!("b = {} after", b); // Check how much space a number takes up in memory let mut c = 123456789; println!("c = {}, and it takes up {} bytes", c, mem::size_of_val(&c)); c = 10; println!("c = {}, and it takes up {} bytes", c, mem::size_of_val(&c)); // Char // A single letter delimited by single quotes let d: char = 'x'; println!("{} is a char, its size = {} bytes", d, mem::size_of_val(&d)); // Float let e: f32 = 2.1; println!("e = {}, its size = {} bytes", e, mem::size_of_val(&e)); let f: f64 = 2.145687; println!("f = {}, its size = {} bytes", f, mem::size_of_val(&f)); } fn operators(){ // arithmetic // Rust has the same operators as Python // + / * - % // // The differences are in how exponentiation is done let a = (2 + 5 * 3) / 7; let a_cubed = i32::pow(a, 3); println!("{} to the third power is {}", a, a_cubed); //Bitwise operators // | = OR; & = AND; ^ = XOR; ! = NOT; let c = 1 | 2; // 01 OR 10 = 11 == 3 println!("1|2 = {}", c); // Logical operators // <; >; <=; >=; == let three_less_than_five = 3 < 5; println!("3 is less than five: {}", three_less_than_five); } fn scope_and_shadowing() { let a = 123; { // New scope -> the part delimited by {} let b = 456; // b only exists inside this scope, // once this block of code has run, b is dropped from memory println!("inside this scope b is equal {}", b); // However, a exists inside this scope, because it comes from the parent scope println!("a = {}", a); // Still, it is possible to hide it by redeclaring a variable a inside this scope let a = 7; println!("new a = {}", a) } // Outside the old scope, a goes back to the initial value declared at the start of the function println!("a in the parent scope = {}", a) }
true
2d9753626014630851f8ba324fa4b92cf6df0869
Rust
oabrivard/efp
/rust/retirement_calc/src/main.rs
UTF-8
978
3.59375
4
[]
no_license
use std::io::stdin; use chrono::{Utc, Datelike}; fn read_positive_int() -> i32 { loop { let mut buffer = String::new(); stdin().read_line(&mut buffer).expect("error reading data"); if let Ok(result) = buffer.trim().parse::<i32>() { if result >= 0 {return result;} } println!("Please enter a number >= 0 :"); } } fn main() { println!("What is your current age?"); let age = read_positive_int(); println!("At what age would you like to retire?"); let retirement_age = read_positive_int(); let left_to_work = retirement_age - age; if left_to_work < 0 { println!("You can already retire!") } else { println!("You have {} years left until you can retire.", left_to_work); let current_year = Utc::now().year(); let retirement_year = current_year + left_to_work; println!("It's {}, so you can retire in {}.", current_year, retirement_year); } }
true
b724c00f2ddb2ced77cd6d140b35db59dc98a789
Rust
douban/redarrow-rs
/src/webclient.rs
UTF-8
4,140
2.765625
3
[ "BSD-3-Clause" ]
permissive
use std::sync::mpsc; use std::time::Duration; use anyhow::Result; use crate::{CommandParams, CommandResult}; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); #[derive(Debug)] pub struct Client { host: String, port: u32, command: String, arguments: Vec<String>, user_agent: String, connect_timeout: Duration, } impl Client { pub fn new(host: String, port: u32, command: String, arguments: Vec<String>) -> Self { Client { host: host, port: port, command: command, arguments: arguments, user_agent: format!("Redarrow-webclient/{}", VERSION), connect_timeout: Duration::new(3, 0), } } pub fn set_user_agent(self: &mut Self, ua: &str) { self.user_agent = format!("{}/{}", ua, VERSION); } pub fn set_connect_timeout(self: &mut Self, timeout: Duration) { self.connect_timeout = timeout; } fn build_url(self: &Self) -> String { format!( "http://{}:{}/command/{}", self.host, self.port, self.command ) } fn get_arguments(self: &Self) -> Option<String> { if self.arguments.is_empty() { None } else { Some(self.arguments.join(" ")) } } pub async fn run_command(self: &Self) -> Result<CommandResult> { let params = CommandParams { chunked: None, argument: self.get_arguments(), format: None, }; let body = reqwest::Client::builder() .user_agent(self.user_agent.as_str()) .connect_timeout(self.connect_timeout) .build()? .get(self.build_url().as_str()) .query(&params) .send() .await? .bytes() .await?; Ok(serde_json::from_slice(&body)?) } pub async fn run_realtime( self: &Self, tx: mpsc::Sender<(i8, Vec<u8>)>, ) -> Result<CommandResult> { let params = CommandParams { chunked: Some(1), argument: self.get_arguments(), format: None, }; let mut res = reqwest::Client::builder() .user_agent(self.user_agent.as_str()) .connect_timeout(self.connect_timeout) .build()? .get(self.build_url().as_str()) .query(&params) .send() .await?; let mut last_fd = -1; let mut tmp: Vec<u8> = Vec::new(); while let Some(chunk) = res.chunk().await? { let mut line_ends = false; match chunk.last() { None => { eprintln!("empty chunk received"); continue; } Some(char) => { if *char == b'\n' { line_ends = true; } } } if last_fd >= 0 { tmp.extend_from_slice(&chunk); if line_ends { tx.send((last_fd, tmp.clone()))?; last_fd = -1; tmp.clear(); } continue; } let fd = parse_fd(&chunk); match fd { 0 => { return Ok(serde_json::from_slice(&chunk[3..])?); } 1 | 2 => { if line_ends { tx.send((fd, chunk[3..].to_vec()))?; } else { tmp.extend_from_slice(&chunk[3..]); last_fd = fd; } } _ => { eprintln!("Response Error: {:?}", chunk); } } } Ok(CommandResult::err("Command Unfinished".to_string())) } } fn parse_fd(s: &[u8]) -> i8 { if s.len() < 3 { -1 } else { let (left, _) = s.split_at(3); match left { b"0> " => 0, b"1> " => 1, b"2> " => 2, _ => -1, } } }
true
83501348a209c4a0bc4d870b114e65d2e82d81ab
Rust
anderssonjohn/AdventOfCode
/rust_2015/src/day03.rs
UTF-8
2,190
3.515625
4
[]
no_license
use crate::utils::read_file; use std::collections::HashMap; enum Direction { North, West, South, East } #[derive(Clone, Copy, Eq, Hash, PartialEq)] struct Position {x: i32, y: i32} impl Position { pub fn new() -> Position { Position {x: 0, y: 0} } } pub fn main () { println!("Day 3"); let input = read_file("day03"); let mut map: HashMap<Position, u32> = HashMap::new(); map.insert(Position::new(), 0); let walk: Vec<Direction> = input.chars().map(char_to_dir).collect(); walk.iter().fold(Position::new(), |pos, dir| doo(&mut map, pos, dir)); println!("{}", map.len()); main2(); } pub fn main2 () { println!("Day 3, part 2"); let input = read_file("day03"); let mut map: HashMap<Position, u32> = HashMap::new(); map.insert(Position::new(), 0); let walk: Vec<Direction> = input.chars().map(char_to_dir).collect(); let mut index = 0; let (even, odd): (Vec<Direction>, Vec<Direction>) = walk.into_iter() .partition(|_| {index += 1; index % 2 == 0}); even.iter().fold(Position::new(), |pos, dir| doo(&mut map, pos, dir)); odd.iter().fold(Position::new(), |pos, dir| doo(&mut map, pos, dir)); println!("{}", map.len()); println!("{}", even.len()); println!("{}", odd.len()); } fn doo (map: &mut HashMap<Position, u32>, Position {x, y}: Position, dir: &Direction ) -> Position { let new_pos = match dir { Direction::North => Position {x, y: y + 1 }, Direction::West => Position { x: x - 1, y }, Direction::South => Position { x, y: y - 1 }, Direction::East => Position { x: x + 1, y } }; visit_post(map, &new_pos); return new_pos } fn visit_post (map: &mut HashMap<Position, u32>, pos: &Position) { let op_count = map.get_mut(pos); if let Some(count) = op_count { *count += 1; } else { map.insert(*pos, 1); } } fn char_to_dir (char: char) -> Direction { match char { '^' => Direction::North, 'v' => Direction::South, '>' => Direction::East, '<' => Direction::West, _ => panic!("Invalid character") } }
true
bfba7514f941a612103217010dc86012cbefc0b2
Rust
pipi32167/LeetCode
/rust/src/problem_0640.rs
UTF-8
2,589
3.46875
3
[]
no_license
use std::i32; use std::iter::FromIterator; #[derive(Debug)] struct Solution {} impl Solution { pub fn solve_equation(equation: String) -> String { let parse = || -> Vec<String> { let mut ret = vec![]; let mut chars = vec![]; let mut is_met_eq = false; for c in equation.chars() { if c == '=' { is_met_eq = true; if chars.len() > 0 { ret.push(String::from_iter(chars.clone())); } ret.push(String::from("-")); chars.clear(); continue; } if "+-".contains(c) { if chars.len() > 0 { ret.push(String::from_iter(chars.clone())); } if !is_met_eq { ret.push(c.to_string()); } else { let t = if c == '+' { "-" } else { "+" }; ret.push(String::from(t)); } chars.clear(); continue; } chars.push(c); } ret.push(String::from_iter(chars)); ret }; let tokens = parse(); // println!("{:?}", tokens); let get_coef_of_x = |x: String| -> i32 { let ret = x.trim_end_matches('x'); if ret.len() == 0 { return 1; } i32::from_str_radix(ret, 10).ok().unwrap() }; let mut x = 0; let mut y = 0; let mut positive = true; for token in tokens { if token == "+" || token == "-" { positive = if token == "+" { true } else { false }; continue; } let is_x = token.contains("x"); let target = if is_x { &mut x } else { &mut y }; let r = if is_x { get_coef_of_x(token) } else { i32::from_str_radix(&token, 10).ok().unwrap() }; *target += if positive { r } else { -r }; } if x == 0 { if y == 0 { return "Infinite solutions".to_owned(); } else { return "No solution".to_owned(); } } format!("x={}", -y / x) } } #[test] fn test() { let equation = "-x=-1".to_owned(); assert_eq!(Solution::solve_equation(equation), "x=1"); let equation = "x+5-3+x=6+x-2".to_owned(); assert_eq!(Solution::solve_equation(equation), "x=2"); let equation = "2x=x".to_owned(); assert_eq!(Solution::solve_equation(equation), "x=0"); let equation = "x=x".to_owned(); assert_eq!(Solution::solve_equation(equation), "Infinite solutions"); let equation = "2x+3x-6x=x+2".to_owned(); assert_eq!(Solution::solve_equation(equation), "x=-1"); let equation = "x=x+2".to_owned(); assert_eq!(Solution::solve_equation(equation), "No solution"); }
true
75799503cfc6024ffdd69ddcd51d7478ea814c5c
Rust
nushell/nushell
/crates/nu-cmd-lang/src/core_commands/while_.rs
UTF-8
3,758
2.859375
3
[ "MIT" ]
permissive
use nu_engine::{eval_block, eval_expression, CallExt}; use nu_protocol::ast::Call; use nu_protocol::engine::{Block, Command, EngineState, Stack}; use nu_protocol::{ Category, Example, PipelineData, ShellError, Signature, SyntaxShape, Type, Value, }; #[derive(Clone)] pub struct While; impl Command for While { fn name(&self) -> &str { "while" } fn usage(&self) -> &str { "Conditionally run a block in a loop." } fn signature(&self) -> nu_protocol::Signature { Signature::build("while") .input_output_types(vec![(Type::Nothing, Type::Nothing)]) .allow_variants_without_examples(true) .required("cond", SyntaxShape::MathExpression, "condition to check") .required( "block", SyntaxShape::Block, "block to loop if check succeeds", ) .category(Category::Core) } fn run( &self, engine_state: &EngineState, stack: &mut Stack, call: &Call, _input: PipelineData, ) -> Result<PipelineData, ShellError> { let cond = call.positional_nth(0).expect("checked through parser"); let block: Block = call.req(engine_state, stack, 1)?; loop { if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) { break; } let result = eval_expression(engine_state, stack, cond)?; match &result { Value::Bool { val, .. } => { if *val { let block = engine_state.get_block(block.block_id); match eval_block( engine_state, stack, block, PipelineData::empty(), call.redirect_stdout, call.redirect_stderr, ) { Err(ShellError::Break(_)) => { break; } Err(ShellError::Continue(_)) => { continue; } Err(err) => { return Err(err); } Ok(pipeline) => { let exit_code = pipeline.drain_with_exit_code()?; if exit_code != 0 { return Ok( PipelineData::new_external_stream_with_only_exit_code( exit_code, ), ); } } } } else { break; } } x => { return Err(ShellError::CantConvert { to_type: "bool".into(), from_type: x.get_type().to_string(), span: result.span(), help: None, }) } } } Ok(PipelineData::empty()) } fn examples(&self) -> Vec<Example> { vec![Example { description: "Loop while a condition is true", example: "mut x = 0; while $x < 10 { $x = $x + 1 }", result: None, }] } } #[cfg(test)] mod test { use super::*; #[test] fn test_examples() { use crate::test_examples; test_examples(While {}) } }
true
614e52d3603f47d6a5e1a2e2dc12e098aeb59326
Rust
rowhit/scirust
/sralgebra/src/commutative_ring.rs
UTF-8
3,934
3.421875
3
[ "Apache-2.0" ]
permissive
#![doc="Defines the commutative ring algebraic structure. A commutative ring is a ring where the multiplication operation is commutative. A commutative ring is a set R equipped with binary operations + and * satisfying the following nine axioms: R is an abelian group under addition, meaning: * (a + b) + c = a + (b + c) for all a, b, c in R (+ is associative). * There is an element 0 in R such that a + 0 = a and 0 + a = a for all a in R (0 is the additive identity). * For each a in R there exists −a in R such that a + (−a) = (−a) + a = 0 (−a is the additive inverse of a). * a + b = b + a for all a, b in R (+ is commutative). R is a commutative monoid under multiplication, meaning: * (a * b) * c = a * (b * c) for all a, b, c in R (* is associative). * There is an element 1 in R such that a * 1 = a and 1 * a = a for all a in R (1 is the multiplicative identity).[2] * a * b = b * a for all a, b in R (* is commutative). Multiplication distributes over addition: * a * (b + c) = (a * b) + (a * c) for all a, b, c in R (left distributivity). * (b + c) * a = (b * a) + (c * a) for all a, b, c in R (right distributivity). References: * http://en.wikipedia.org/wiki/Algebraic_structure * http://en.wikipedia.org/wiki/Group_(mathematics) * http://en.wikipedia.org/wiki/Monoid * http://en.wikipedia.org/wiki/Ring_(mathematics) "] use monoid::{CommutativeMonoidMulPartial, CommutativeMonoidMul}; use ring::{RingPartial, Ring}; /// Commutative ring with partial equivalence pub trait CommutativeRingPartial : RingPartial + CommutativeMonoidMulPartial { fn prop_multiplication_is_commutative(a : Self, b : Self) -> bool { CommutativeMonoidMulPartial::prop_is_commutative(a, b) } fn check_all_properties(a: Self, b: Self, c : Self)-> bool { if !RingPartial::check_all_properties( a.clone(), b.clone(), c.clone()) { return false; } if !CommutativeRingPartial::prop_multiplication_is_commutative(a.clone(), b.clone()){ return false; } if !CommutativeRingPartial::prop_multiplication_is_commutative(b.clone(), c.clone()){ return false; } if !CommutativeRingPartial::prop_multiplication_is_commutative(a.clone(), c.clone()){ return false; } true } } impl<T> CommutativeRingPartial for T where T: RingPartial + CommutativeMonoidMulPartial {} /////////////////////////////////////////////////////////// /// Commutative ring with full equivalence pub trait CommutativeRing : CommutativeRingPartial + Ring + CommutativeMonoidMul{ fn check_all_properties(a: Self, b: Self, c : Self)-> bool { CommutativeRingPartial::check_all_properties(a, b, c) } } impl<T> CommutativeRing for T where T: Ring + CommutativeMonoidMul { } #[cfg(test)] mod tests { use super::*; #[test] fn test_commutative_ring_partial() { assert!(CommutativeRingPartial::check_all_properties(2i8, 3i8, 1i8)); assert!(CommutativeRingPartial::check_all_properties(2i16, 3i16, 1i16)); assert!(CommutativeRingPartial::check_all_properties(2i32, 3i32, 1i32)); assert!(CommutativeRingPartial::check_all_properties(2i64, 3i64, 1i64)); assert!(CommutativeRingPartial::check_all_properties(2f32, 3f32, 1f32)); assert!(CommutativeRingPartial::check_all_properties(2f64, 3f64, 1f64)); } #[test] fn test_commutative_ring() { assert!(CommutativeRing::check_all_properties(2i8, 3i8, 1i8)); assert!(CommutativeRing::check_all_properties(2i16, 3i16, 1i16)); assert!(CommutativeRing::check_all_properties(2i32, 3i32, 1i32)); assert!(CommutativeRing::check_all_properties(2i64, 3i64, 1i64)); // following cannot work //assert!(Ring::check_all_properties(2f64, 3f64, 1f64)); //assert!(Ring::check_all_properties(2u64, 
3u64, 1u64)); } }
true
dfc0d32a3adaf825eac3524c6919e2bd17d84fc8
Rust
Niedzwiedzw/rust-drivers-playground
/src/main.rs
UTF-8
2,353
2.8125
3
[]
no_license
mod command_ids;

use rppal::{
    gpio::{Gpio, OutputPin},
    spi::{Bus, Mode, SlaveSelect, Spi},
};
use std::thread;
use std::time::Duration;

struct Ssd1306Display {
    spi_channel: Spi,
    dc_pin: OutputPin,
}

impl Ssd1306Display {
    pub fn new() -> Self {
        // This pin's state tells the device whether it's receiving a chunk of
        // data (HIGH) or a command (LOW).
        let dc_pin = Gpio::new()
            .expect("unable to initialize GPIO")
            .get(36)
            .expect("unable to access D/C Pin")
            .into_output();

        let spi_channel = Spi::new(
            Bus::Spi0,
            SlaveSelect::Ss0,
            9999,
            Mode::Mode0,
        ).expect("Unable to initialize device.");

        Self { spi_channel, dc_pin }
    }

    pub fn initialize(&mut self) {
        self.command(command_ids::DISPLAYOFF);
        self.command(command_ids::SETDISPLAYCLOCKDIV);
        self.command(0x80);
        self.command(command_ids::SETMULTIPLEX);
        self.command(0x3F);
        self.command(command_ids::SETDISPLAYOFFSET);
        self.command(0x0);
        self.command(command_ids::SETSTARTLINE | 0x0);
        self.command(command_ids::CHARGEPUMP);
        self.command(0x14);
        self.command(command_ids::MEMORYMODE);
        self.command(0x00);
        self.command(command_ids::SEGREMAP | 0x1);
        self.command(command_ids::COMSCANDEC);
        self.command(command_ids::SETCOMPINS);
        self.command(0x12);
        self.command(command_ids::SETCONTRAST);
        self.command(0xCF);
        self.command(command_ids::SETPRECHARGE);
        self.command(0xF1);
        self.command(command_ids::SETVCOMDETECT);
        self.command(0x40);
        self.command(command_ids::DISPLAYALLON_RESUME);
        self.command(command_ids::NORMALDISPLAY);
        self.command(command_ids::DISPLAYON);
    }

    pub fn command(&mut self, command: u8) {
        // Pull D/C low so the controller interprets the next byte as a command.
        self.dc_pin
            .set_low();
        self.spi_channel
            .write(&[command])
            .expect("failed to send the command");
    }
}

fn main() {
    let mut device = Ssd1306Display::new();
    device.initialize();
    device.command(command_ids::DISPLAYALLON);
    thread::sleep(Duration::from_micros(5000000));
    println!("Hello, raspberry!");
}
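// Editor's note (illustrative sketch, not part of the original file): the D/C
// protocol described in `new` has a data-side counterpart. A hypothetical
// `data` method would drive the pin HIGH before streaming framebuffer bytes;
// the method name and its usage are assumptions, though `set_high` and `write`
// are existing rppal APIs.
impl Ssd1306Display {
    pub fn data(&mut self, bytes: &[u8]) {
        // HIGH tells the controller the incoming bytes are display data.
        self.dc_pin.set_high();
        self.spi_channel
            .write(bytes)
            .expect("failed to send data");
    }
}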
true
52810575372941afa08e01a18777ebf370ce3a77
Rust
maxim-komar/learning-rust
/rust-by-example/trait/dyn/dyn.rs
UTF-8
1,589
4.09375
4
[]
no_license
// The Rust compiler needs to know how much space every function's return
// type requires. This means all your functions have to return a concrete
// type. Unlike other languages, if you have a trait like `Animal`, you
// can't write a function that returns `Animal`, because its different
// implementations will need different amounts of memory.
//
// However, there's an easy workaround. Instead of returning a trait object
// directly, our functions return a `Box` which contains some `Animal`.
// A `Box` is just a reference to some memory in the heap. Because a
// reference has a statically-known size, and the compiler can guarantee it
// points to a heap-allocated `Animal`, we can return a trait from our
// function.
//
// Rust tries to be as explicit as possible whenever it allocates memory
// on the heap. So if your function returns a pointer-to-trait-on-heap in
// this way, you need to write the return type with the `dyn` keyword,
// e.g. `Box<dyn Animal>`.

struct Sheep {}
struct Cow {}

trait Animal {
    fn noise(&self) -> &'static str;
}

impl Animal for Sheep {
    fn noise(&self) -> &'static str {
        "baaah!"
    }
}

impl Animal for Cow {
    fn noise(&self) -> &'static str {
        "moooo!"
    }
}

// Returns some struct that implements Animal, but we don't know which one
// at compile time, so the value is boxed behind a trait object.
fn random_animal(random_number: f64) -> Box<dyn Animal> {
    if random_number < 0.5 {
        Box::new(Sheep {})
    } else {
        Box::new(Cow {})
    }
}

fn main() {
    let random_number = 0.234;
    let animal = random_animal(random_number);
    println!("You've randomly chosen an animal, and it says {}", animal.noise());
}
true
f5970deb28f2d86ca9e595d2a08069b244cecf8b
Rust
FarhanAliRaza/Learning-Rust
/src/print.rs
UTF-8
619
3.640625
4
[]
no_license
pub fn run(){ println!("Hello "); //can not print number directly println!("{}", 1); //basic frmating println!("My name is {} and i am from {}", "farhan", "Pakistan"); //positional argument println!("My name is {0} and {0} is from {1}", "faryhan", "Pakistan"); //named arguments println!("{name} likes to {activity}", name = "farhan", activity = "code" ); //placeholder traits println!("Binary: {:b} Hex : {:x} Octal : {:o}", 10, 10, 10); //placeholder fro ebug traits println!("{:?}", (true, "hello", 1)); //basic math println!("10 + 10 = {}", 10 + 10); }
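// Editor's note (illustrative sketch, not part of the original file): the same
// placeholder syntax also accepts fill/alignment, width and precision flags.
// The function name `run_extras` is an assumption for illustration only.
pub fn run_extras() {
    // Fill/alignment and width
    println!("[{:>8}]", "right"); // pads to width 8, right-aligned
    println!("[{:<8}]", "left");  // pads to width 8, left-aligned
    println!("[{:^8}]", "mid");   // pads to width 8, centred

    // Precision for floating point values
    println!("{:.3}", 3.14159);   // prints 3.142
}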
true
153571032b909ece8d83ffb2977dacbf1ef7549f
Rust
CosmWasm/terra-contracts
/contracts/maker/tests/integration.rs
UTF-8
3,859
2.78125
3
[ "Apache-2.0" ]
permissive
//! This integration test tries to run and call the generated wasm.
//! It depends on a Wasm build being available, which you can create with `cargo wasm`.
//! Then running `cargo integration-test` will validate we can properly call into that generated Wasm.
//!
//! You can easily convert unit tests to integration tests.
//! 1. First copy them over verbatim,
//! 2. Then change
//!      let mut deps = mock_dependencies(20, &[]);
//!    to
//!      let mut deps = mock_instance(WASM, &[]);
//! 3. If you access raw storage, wherever you see something like:
//!      deps.storage.get(CONFIG_KEY).expect("no data stored");
//!    replace it with:
//!      deps.with_storage(|store| {
//!          let data = store.get(CONFIG_KEY).expect("no data stored");
//!          //...
//!      });
//! 4. Anywhere you see query(&deps, ...) you must replace it with query(&mut deps, ...)

use cosmwasm_std::{
    coin, coins, from_binary, Coin, CosmosMsg, HandleResponse, InitResponse, Uint128,
};
use cosmwasm_vm::testing::{
    handle, init, mock_dependencies, mock_env, query, MockApi, MockQuerier, MockStorage,
};
use cosmwasm_vm::{Api, Instance};
use terra_bindings::{SwapMsg, TerraMsg};

use maker::msg::{ConfigResponse, HandleMsg, InitMsg, QueryMsg};

// This line will test the output of cargo wasm
static WASM: &[u8] = include_bytes!("../target/wasm32-unknown-unknown/release/maker.wasm");
// You can uncomment this line instead to test productionified build from cosmwasm-opt
// static WASM: &[u8] = include_bytes!("../contract.wasm");

const DEFAULT_GAS_LIMIT: u64 = 500_000;

// TODO: improve the whole state of this
pub fn mock_instance(
    wasm: &[u8],
    contract_balance: &[Coin],
) -> Instance<MockStorage, MockApi, MockQuerier> {
    // TODO: check_wasm is not exported from cosmwasm_vm
    // let terra_features = features_from_csv("staking,terra");
    // check_wasm(wasm, &terra_features).unwrap();
    let deps = mock_dependencies(20, contract_balance);
    Instance::from_code(wasm, deps, DEFAULT_GAS_LIMIT).unwrap()
}

#[test]
fn proper_initialization() {
    let mut deps = mock_instance(WASM, &[]);

    let msg = InitMsg {
        ask: "BTC".into(),
        offer: "ETH".into(),
    };
    let env = mock_env(&deps.api, "creator", &coins(1000, "earth"));

    // we can just call .unwrap() to assert this was a success
    let res: InitResponse<TerraMsg> = init(&mut deps, env, msg).unwrap();
    assert_eq!(0, res.messages.len());

    // it worked, let's query the state
    let res = query(&mut deps, QueryMsg::Config {}).unwrap();
    let value: ConfigResponse = from_binary(&res).unwrap();
    assert_eq!("BTC", value.ask.as_str());
    assert_eq!("ETH", value.offer.as_str());
    assert_eq!("creator", value.owner.as_str());
}

#[test]
fn buy_limit() {
    let mut deps = mock_instance(WASM, &coins(200, "ETH"));

    let msg = InitMsg {
        ask: "BTC".into(),
        offer: "ETH".into(),
    };
    let env = mock_env(&deps.api, "creator", &coins(200, "ETH"));
    let _res: InitResponse<TerraMsg> = init(&mut deps, env, msg).unwrap();

    // we buy BTC with half the ETH
    let env = mock_env(&deps.api, "creator", &[]);
    let contract_addr = deps.api.human_address(&env.contract.address).unwrap();
    let msg = HandleMsg::Buy {
        limit: Some(Uint128(100)),
    };
    let res: HandleResponse<TerraMsg> = handle(&mut deps, env, msg).unwrap();

    // make sure we produce a proper trade order
    assert_eq!(1, res.messages.len());
    if let CosmosMsg::Custom(TerraMsg::Swap(SwapMsg::Trade {
        trader_addr,
        offer_coin,
        ask_denom,
    })) = &res.messages[0]
    {
        assert_eq!(trader_addr, &contract_addr);
        assert_eq!(offer_coin, &coin(100, "ETH"));
        assert_eq!(ask_denom, "BTC");
    } else {
        panic!("Expected swap message, got: {:?}", &res.messages[0]);
    }
}
true
47a3f9ef3a7942477ac3a31a21a2215c1d8ec721
Rust
jihoonson/iron-arrow
/src/common/mod.rs
UTF-8
14,757
2.703125
3
[ "Apache-2.0" ]
permissive
pub mod status; pub mod ty; pub mod bit_util; pub mod field; use std::collections::HashMap; #[derive(Debug, Eq, PartialEq)] pub struct KeyValueMetadata { keys: Vec<String>, values: Vec<String> } impl KeyValueMetadata { pub fn new() -> KeyValueMetadata { KeyValueMetadata { keys: Vec::new(), values: Vec::new() } } pub fn with_kvs(keys: Vec<String>, values: Vec<String>) -> KeyValueMetadata { KeyValueMetadata { keys, values } } pub fn append(&mut self, key: String, val: String) { self.keys.push(key); self.values.push(val); } pub fn reserve(&mut self, n: i64) { if n >= 0 { let m = n as usize; self.keys.reserve(m); self.values.reserve(m); } else { panic!(); } } pub fn key(&self, i: i64) -> &String { &self.keys[i as usize] } pub fn value(&self, i: i64) -> &String { &self.values[i as usize] } pub fn len(&self) -> i64 { if self.keys.len() == self.values.len() { self.keys.len() as i64 } else { panic!(); } } pub fn to_unordered_map(&self) -> HashMap<String, String> { let len = self.len() as usize; let mut map = HashMap::with_capacity(len); for i in 0..len { map.insert(self.keys[i].clone(), self.values[i].clone()); } map } } impl Clone for KeyValueMetadata { fn clone(&self) -> Self { KeyValueMetadata { keys: self.keys.clone(), values: self.values.clone() } } } #[cfg(test)] mod tests { use common::ty::*; use common::field::*; #[test] fn test_field() { use common::KeyValueMetadata; let field = Field::new(String::from("f1"), Ty::null()); assert_eq!("f1", field.name().as_str()); assert_eq!(&Ty::NA, field.data_type()); assert_eq!(true, field.nullable()); assert!(field.metadata().is_none()); let field = Field::non_null(String::from("f2"), Ty::float()); assert_eq!("f2", field.name().as_str()); assert_eq!(&Ty::Float, field.data_type()); assert_eq!(false, field.nullable()); assert!(field.metadata().is_none()); let mut metadata = KeyValueMetadata::new(); metadata.append(String::from("k1"), String::from("v1")); metadata.append(String::from("k2"), String::from("v2")); metadata.append(String::from("k3"), String::from("v3")); let expected_metadata = metadata.clone(); let field = Field::new_with_metadata(String::from("f3"), Ty::int64(), metadata); assert_eq!("f3", field.name().as_str()); assert_eq!(&Ty::Int64, field.data_type()); assert_eq!(true, field.nullable()); assert_eq!(&Some(expected_metadata), field.metadata()); } #[test] fn test_null() { let ty = Ty::null(); assert_eq!(Ty::NA, ty); assert_eq!("null", ty.name()); assert_eq!(Vec::<BufferDesc>::new(), ty.get_buffer_layout()); } #[test] fn test_boolean() { let ty = Ty::bool(); assert_eq!(Ty::Bool, ty); assert_eq!("bool", ty.name()); assert_eq!( vec![BufferDesc::validity_buffer(), BufferDesc::new(BufferType::Data, 1)], ty.get_buffer_layout() ); } macro_rules! 
test_primitive_types { ($test_name: ident, $type_name: ident, $str_name: expr, $ty: path, $width: expr, $buffer_layout: expr) => ( #[test] fn $test_name() { let ty = Ty::$type_name(); assert_eq!($ty, ty); assert_eq!($str_name, ty.name()); assert_eq!($width, ty.bit_width()); assert_eq!($buffer_layout, ty.get_buffer_layout()); } ); } test_primitive_types!(test_uint8_fixed_width, uint8, "uint8", Ty::UInt8, 8, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(8)]); test_primitive_types!(test_uint16_fixed_width, uint16, "uint16", Ty::UInt16, 16, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(16)]); test_primitive_types!(test_uint32_fixed_width, uint32, "uint32", Ty::UInt32, 32, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(32)]); test_primitive_types!(test_uint64_fixed_width, uint64, "uint64", Ty::UInt64, 64, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)]); test_primitive_types!(test_int8_fixed_width, int8, "int8", Ty::Int8, 8, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(8)]); test_primitive_types!(test_int16_fixed_width, int16, "int16", Ty::Int16, 16, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(16)]); test_primitive_types!(test_int32_fixed_width, int32, "int32", Ty::Int32, 32, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(32)]); test_primitive_types!(test_int64_fixed_width, int64, "int64", Ty::Int64, 64, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)]); test_primitive_types!(test_half_float_fixed_width, halffloat, "halffloat", Ty::HalfFloat, 16, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(16)]); test_primitive_types!(test_float_fixed_width, float, "float", Ty::Float, 32, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(32)]); test_primitive_types!(test_double_fixed_width, double, "double", Ty::Double, 64, vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)]); #[test] fn test_integers_signed() { assert!(Ty::int8().is_signed()); assert!(Ty::int16().is_signed()); assert!(Ty::int32().is_signed()); assert!(Ty::int64().is_signed()); assert!(!Ty::uint8().is_signed()); assert!(!Ty::uint16().is_signed()); assert!(!Ty::uint32().is_signed()); assert!(!Ty::uint64().is_signed()); } #[test] fn test_floats() { assert_eq!(Precision::Half, Ty::halffloat().precision()); assert_eq!(Precision::Single, Ty::float().precision()); assert_eq!(Precision::Double, Ty::double().precision()); } #[test] fn test_timestamp() { let ty = Ty::timestamp(); assert_eq!(Ty::Timestamp { unit: TimeUnit::Milli, timezone: String::new() }, ty); assert_eq!("timestamp", ty.name()); assert_eq!(64, ty.bit_width()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)], ty.get_buffer_layout()); assert_eq!(&TimeUnit::Milli, ty.time_unit()); } #[test] fn test_time() { let ty = Ty::time64(); assert_eq!(Ty::Time64 { unit: TimeUnit::Milli }, ty); assert_eq!("time64", ty.name()); assert_eq!(64, ty.bit_width()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)], ty.get_buffer_layout()); assert_eq!(&TimeUnit::Milli, ty.time_unit()); let ty = Ty::time32(); assert_eq!(Ty::Time32 { unit: TimeUnit::Milli }, ty); assert_eq!("time32", ty.name()); assert_eq!(32, ty.bit_width()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(32)], ty.get_buffer_layout()); assert_eq!(&TimeUnit::Milli, ty.time_unit()); } #[test] fn test_interval() { let ty = Ty::interval(); assert_eq!(Ty::Interval { unit: IntervalUnit::YearMonth }, ty); 
assert_eq!("interval", ty.name()); assert_eq!(64, ty.bit_width()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)], ty.get_buffer_layout()); assert_eq!(&IntervalUnit::YearMonth, ty.interval_unit()); } #[test] fn test_date() { let ty = Ty::date32(); assert_eq!(Ty::Date32 { unit: DateUnit::Milli }, ty); assert_eq!("date32", ty.name()); assert_eq!(32, ty.bit_width()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(32)], ty.get_buffer_layout()); assert_eq!(&DateUnit::Milli, ty.date_unit()); let ty = Ty::date64_with_unit(DateUnit::Day); assert_eq!(Ty::Date64 { unit: DateUnit::Day }, ty); assert_eq!("date64", ty.name()); assert_eq!(64, ty.bit_width()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(64)], ty.get_buffer_layout()); assert_eq!(&DateUnit::Day, ty.date_unit()); } #[test] fn test_binary() { let ty = Ty::binary(); assert_eq!(Ty::Binary, ty); assert_eq!("binary", ty.name()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::offset_buffer(), BufferDesc::data_buffer(8)], ty.get_buffer_layout()); } #[test] fn test_string() { let ty = Ty::string(); assert_eq!(Ty::String, ty); assert_eq!("utf8", ty.name()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::offset_buffer(), BufferDesc::data_buffer(8)], ty.get_buffer_layout()); } #[test] fn test_decimal() { let ty = Ty::decimal(5, 2); assert_eq!(Ty::Decimal { precision: 5, scale: 2 }, ty); assert_eq!("decimal", ty.name()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::data_buffer(128)], ty.get_buffer_layout()); assert_eq!(5, ty.decimal_precision()); assert_eq!(2, ty.decimal_scale()); } #[test] fn test_list() { use std; let ty = Ty::list(Box::new(Ty::timestamp())); assert_eq!(Ty::List { value_type: Box::new(Ty::timestamp()) }, ty); assert_eq!("list", ty.name()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::offset_buffer()], ty.get_buffer_layout()); assert_eq!(&Box::new(Ty::timestamp()), ty.list_value_type()); let timestamp_ty = ty.list_value_type(); assert_eq!(&Ty::timestamp(), timestamp_ty.as_ref()); } #[test] fn test_struct() { let fields = vec![ Field::new(String::from("f1"), Ty::date32_with_unit(DateUnit::Day)), Field::new(String::from("f2"), Ty::int32()) ]; let ty = Ty::struct_type( fields.clone() ); assert_eq!(Ty::Struct { fields: fields.clone() }, ty); assert_eq!("struct", ty.name()); assert_eq!(vec![BufferDesc::validity_buffer()], ty.get_buffer_layout()); assert_eq!(2, ty.num_children()); assert_eq!(&Field::new(String::from("f1"), Ty::date32_with_unit(DateUnit::Day)), ty.child(0)); assert_eq!(&Field::new(String::from("f2"), Ty::int32()), ty.child(1)); } #[test] fn test_union() { let fields = vec![ Field::new(String::from("f1"), Ty::date32_with_unit(DateUnit::Day)), Field::new(String::from("f2"), Ty::int32()) ]; let type_codes = vec![0, 1, 2]; let ty = Ty::union( fields.clone(), type_codes.clone() ); assert_eq!(Ty::Union { fields: fields.clone(), type_codes: type_codes.clone(), mode: UnionMode::SPARSE }, ty); assert_eq!(&String::from("union"), ty.name()); assert_eq!(&vec![0, 1, 2], ty.union_type_codes()); assert_eq!(&UnionMode::SPARSE, ty.union_mode()); assert_eq!(2, ty.num_children()); assert_eq!(&Field::new(String::from("f1"), Ty::date32_with_unit(DateUnit::Day)), ty.child(0)); assert_eq!(&Field::new(String::from("f2"), Ty::int32()), ty.child(1)); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::type_buffer()], ty.get_buffer_layout()); let ty = Ty::union_with_mode( vec![ Field::new(String::from("f1"), 
Ty::date32_with_unit(DateUnit::Day)), Field::new(String::from("f2"), Ty::int32()) ], vec![0, 1, 2], UnionMode::DENSE ); assert_eq!(&UnionMode::DENSE, ty.union_mode()); assert_eq!(vec![BufferDesc::validity_buffer(), BufferDesc::type_buffer(), BufferDesc::offset_buffer()], ty.get_buffer_layout()); } // #[test] // fn test_dictionary() { // // TODO // } #[test] fn test_is_integer() { use array::Array; use builder::ArrayBuilder; use memory_pool::DefaultMemoryPool; use buffer::PoolBuffer; use std::sync::Arc; use std::cell::RefCell; assert!(Ty::uint8().is_integer()); assert!(Ty::uint16().is_integer()); assert!(Ty::uint32().is_integer()); assert!(Ty::uint64().is_integer()); assert!(Ty::int8().is_integer()); assert!(Ty::int16().is_integer()); assert!(Ty::int32().is_integer()); assert!(Ty::int64().is_integer()); assert_eq!(false, Ty::null().is_integer()); assert_eq!(false, Ty::bool().is_integer()); assert_eq!(false, Ty::halffloat().is_integer()); assert_eq!(false, Ty::float().is_integer()); assert_eq!(false, Ty::double().is_integer()); assert_eq!(false, Ty::string().is_integer()); assert_eq!(false, Ty::binary().is_integer()); assert_eq!(false, Ty::date64().is_integer()); assert_eq!(false, Ty::date32().is_integer()); assert_eq!(false, Ty::timestamp().is_integer()); assert_eq!(false, Ty::time32().is_integer()); assert_eq!(false, Ty::time64().is_integer()); assert_eq!(false, Ty::interval().is_integer()); assert_eq!(false, Ty::decimal(5, 2).is_integer()); assert_eq!(false, Ty::list(Box::new(Ty::int8())).is_integer()); assert_eq!(false, Ty::struct_type(vec![Field::new(String::from("f1"), Ty::int8())]).is_integer()); assert_eq!(false, Ty::union(vec![Field::new(String::from("f1"), Ty::int8())], vec![0]).is_integer()); let pool = Arc::new(RefCell::new(DefaultMemoryPool::new())); let null_bitmap = PoolBuffer::new(pool); let builder = ArrayBuilder::null(10); assert_eq!(false, Ty::dictionary(Box::new(Ty::int8()), Box::new(Array::from(builder))).is_integer()); } #[test] fn test_is_float() { use array::Array; use builder::ArrayBuilder; use memory_pool::DefaultMemoryPool; use buffer::PoolBuffer; use std::sync::Arc; use std::cell::RefCell; assert!(Ty::halffloat().is_float()); assert!(Ty::float().is_float()); assert!(Ty::double().is_float()); assert_eq!(false, Ty::null().is_float()); assert_eq!(false, Ty::bool().is_float()); assert_eq!(false, Ty::uint8().is_float()); assert_eq!(false, Ty::uint16().is_float()); assert_eq!(false, Ty::uint32().is_float()); assert_eq!(false, Ty::uint64().is_float()); assert_eq!(false, Ty::int8().is_float()); assert_eq!(false, Ty::int16().is_float()); assert_eq!(false, Ty::int32().is_float()); assert_eq!(false, Ty::int64().is_float()); assert_eq!(false, Ty::string().is_float()); assert_eq!(false, Ty::binary().is_float()); assert_eq!(false, Ty::date64().is_float()); assert_eq!(false, Ty::date32().is_float()); assert_eq!(false, Ty::timestamp().is_float()); assert_eq!(false, Ty::time32().is_float()); assert_eq!(false, Ty::time64().is_float()); assert_eq!(false, Ty::interval().is_float()); assert_eq!(false, Ty::decimal(5, 2).is_float()); assert_eq!(false, Ty::list(Box::new(Ty::int8())).is_float()); assert_eq!(false, Ty::struct_type(vec![Field::new(String::from("f1"), Ty::int8())]).is_float()); assert_eq!(false, Ty::union(vec![Field::new(String::from("f1"), Ty::int8())], vec![0]).is_float()); let pool = Arc::new(RefCell::new(DefaultMemoryPool::new())); let null_bitmap = PoolBuffer::new(pool); let builder = ArrayBuilder::null(10); assert_eq!(false, Ty::dictionary(Box::new(Ty::int8()), 
Box::new(Array::from(builder))).is_float()); } }
true
d5ebb981b687fc35173552e9dec279f257fa6e55
Rust
tillrohrmann/rust-challenges
/aoc_2020_5/src/lib.rs
UTF-8
1,397
3.703125
4
[ "Apache-2.0" ]
permissive
use std::str::FromStr;

#[derive(Eq, PartialEq, Debug)]
pub struct BoardingPass {
    row: usize,
    column: usize,
}

impl BoardingPass {
    pub fn new(row: usize, column: usize) -> BoardingPass {
        BoardingPass { row, column }
    }

    /// Interprets the binary space partitioning characters as a binary number:
    /// 'B' and 'R' are 1 bits, 'F' and 'L' are 0 bits.
    fn parse_number(input: &str) -> usize {
        let mut result = 0;

        for char in input.chars() {
            result <<= 1;
            result |= match char {
                'B' | 'R' => 1,
                'F' | 'L' => 0,
                x => panic!("Unsupported character: {}", x),
            }
        }

        result
    }

    pub fn seat_id(&self) -> usize {
        self.row * 8 + self.column
    }
}

impl FromStr for BoardingPass {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.len() == 10 {
            let (row, column) = s.split_at(7);

            let row = BoardingPass::parse_number(row);
            let column = BoardingPass::parse_number(column);

            Ok(BoardingPass::new(row, column))
        } else {
            Err(format!("Input {} does not contain 10 characters.", s))
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::BoardingPass;

    #[test]
    fn simple_boarding_pass() {
        let boarding_pass: BoardingPass = "BFFFBBFRRR".parse::<BoardingPass>().unwrap();

        assert_eq!(boarding_pass, BoardingPass::new(70, 7));
    }
}
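// Editor's note (illustrative sketch, not part of the original file): typical
// use of the parser above for the day 5 puzzle is to find the highest seat id
// among many boarding passes. The `highest_seat_id` helper is an assumption.
pub fn highest_seat_id(input: &str) -> Option<usize> {
    input
        .lines()
        .filter(|line| !line.is_empty())
        // Each remaining line is expected to be a 10-character pass, e.g. "BFFFBBFRRR".
        .map(|line| line.parse::<BoardingPass>().expect("invalid boarding pass"))
        .map(|pass| pass.seat_id())
        .max()
}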
true