blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
8d772e6588eaf7fbaa3012e3bfefce62f6255328
|
Rust
|
yatender-oktalk/fun-rust
|
/play_with_vectors/src/main.rs
|
UTF-8
| 1,025 | 3.921875 | 4 |
[] |
no_license
|
//!
/// A person record: a name plus an optional age.
#[derive(Debug)]
struct Human {
    name: String,
    age: Option<u16>,
}

impl Human {
    /// Builds a `Human` from a name and an optional age.
    pub fn new(n: String, a: Option<u16>) -> Self {
        Human { name: n, age: a }
    }
}
/// Demo driver: pushes ints and `Human`s into vectors via helpers, then
/// prints the results (same output as before: the int vec, one bool per
/// human, then the human vec's Debug form).
fn main() {
    let mut numbers: Vec<i32> = Vec::new();
    for &n in [23, 33, 53].iter() {
        push_in_list(&mut numbers, n);
    }
    println!("{:?}", numbers);

    // Same idea with a user-defined struct.
    let mut people: Vec<Human> = Vec::new();
    push_into_vec(&mut people, Human::new(String::from("Yatender"), Some(10)));
    push_into_vec(&mut people, Human::new(String::from("milky silky"), Some(2)));

    for person in &people {
        println!("{}", is_present(person));
    }
    println!("{:?}", people);
}
/// Returns true iff the human's age is exactly `Some(10)`.
fn is_present(human: &Human) -> bool {
    // Direct Option comparison replaces the original match, whose
    // catch-all arm bound an unused `_x` pattern.
    human.age == Some(10)
}
/// Appends `number` to `v`, then hands back a shared borrow of the vector.
fn push_in_list(v: &mut Vec<i32>, number: i32) -> &Vec<i32> {
    v.push(number);
    // Reborrow immutably for the caller.
    &*v
}
/// Moves `human` into `v`, then hands back a shared borrow of the vector.
fn push_into_vec(v: &mut Vec<Human>, human: Human) -> &Vec<Human> {
    v.push(human);
    // Reborrow immutably for the caller.
    &*v
}
| true |
9918bc0a073d3851844300ad32d5ef2e83dd93a8
|
Rust
|
kklibo/panorama-explorer
|
/src/photo.rs
|
UTF-8
| 5,720 | 3.109375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use std::rc::Rc;
use std::fmt::{Display,Formatter};
use three_d::{Mat4,Texture,InnerSpace};
use serde::{Serialize, Deserialize, Serializer};
use serde::ser::SerializeStruct;
pub use crate::entities::LoadedImageMesh;
use crate::viewport_geometry::{WorldCoords, PixelCoords};
use crate::world_rectangle::{WorldRectangle,LocalCoords};
/// A placed photo: the renderable mesh for its image plus its placement
/// in world space.
pub struct Photo {
    /// Mesh + texture used to render this photo.
    pub loaded_image_mesh: Rc<LoadedImageMesh>,

    ///this Photo's world space orientation:
    ///* scales 1 (unwarped) pixel to 1 WorldCoords unit
    ///* translates center from world origin in WorldCoords units
    ///* rotates around photo center
    orientation: WorldRectangle,
}
//todo: make this complete?
impl Serialize for Photo {
    /// Serializes only `translate` and `rotate`; `scale` is not persisted
    /// (it is rebuilt from the texture size in `from_loaded_image_mesh`,
    /// and `set_from_json_serde_string` restores only these two fields).
    fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where
        S: Serializer {

        // Field count (2) must match the serialize_field calls below.
        let mut state = serializer.serialize_struct("Photo", 2)?;
        state.serialize_field("translate", &self.orientation.translate)?;
        state.serialize_field("rotate", &self.orientation.rotate)?;
        state.end()
    }
}
impl Display for Photo {
    /// Human-readable dump of the orientation's scale and translate matrices
    /// (rotation is not printed).
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "scale: {:?}", self.orientation.scale)?;
        writeln!(f, "translate: {:?}", self.orientation.translate)?;
        Ok(())
    }
}
impl Photo {
    /// Creates a Photo oriented 1:1 with its texture's pixel dimensions,
    /// centered on the world origin.
    pub fn from_loaded_image_mesh(m: Rc<LoadedImageMesh>) -> Self {
        let orientation = WorldRectangle::new(m.texture_2d.width() as f32,m.texture_2d.height() as f32);

        Self {
            loaded_image_mesh: m,
            orientation,
        }
    }

    /// Restores `translate` and `rotate` from a JSON string produced by this
    /// type's `Serialize` impl. `scale` is left unchanged.
    pub fn set_from_json_serde_string(&mut self, s: &str) -> Result<(), Box<dyn std::error::Error>> {
        // Mirror of the fields written by `Serialize for Photo`.
        #[derive(Deserialize)]
        struct SavedFields {
            translate: Mat4,
            rotate: Mat4,
        }
        let saved_fields: SavedFields = serde_json::from_str(s)?;
        self.orientation.translate = saved_fields.translate;
        self.orientation.rotate = saved_fields.rotate;

        Ok(())
    }

    /// Read-only access to this photo's world-space orientation.
    pub fn orientation(&self) -> &WorldRectangle {
        &self.orientation
    }

    pub fn set_translation(&mut self, center: WorldCoords) {
        self.orientation.set_translation(center)
    }

    pub fn set_rotation(&mut self, angle: f32) {
        self.orientation.set_rotation(angle)
    }

    pub fn rotate_around_point(&mut self, angle: f32, point: WorldCoords) {
        self.orientation.rotate_around_point(angle, point)
    }

    /// gets the WorldCoords location of pixel coords in this photo
    pub fn world_coords(&self, pixel_coords: PixelCoords) -> WorldCoords {
        Self::world_coords_impl(&self.orientation, pixel_coords)
    }

    /// Static form of `world_coords`; also exercised directly by the tests.
    fn world_coords_impl(world_rectangle: &WorldRectangle, pixel_coords: PixelCoords) -> WorldCoords {
        let local_coords = Self::local_coords(world_rectangle, pixel_coords);
        world_rectangle.world_coords(local_coords)
    }

    /// Maps pixel coords (origin top-left, y increasing downward — see the
    /// corner expectations in `world_coords_test`) to local coords: each axis
    /// scaled to unit length, centered on the origin, y flipped to point up.
    fn local_coords(world_rectangle: &WorldRectangle, pixel_coords: PixelCoords) -> LocalCoords {
        // The scale columns' magnitudes are the rectangle's width/height.
        let width = world_rectangle.scale.x.magnitude() as f64;
        let height = world_rectangle.scale.y.magnitude() as f64;

        let local_x =
            if width == 0.0 {
                //if width is somehow 0, center on origin
                0.0
            }
            else {
                //scale to width = 1
                let x = pixel_coords.x / width;
                //center on origin
                let x = x - 0.5;
                x
            };

        let local_y =
            if height == 0.0 {
                //if height is somehow 0, center on origin
                0.0
            }
            else {
                //scale to height = 1
                let y = pixel_coords.y / height;
                //center on origin
                let y = y - 0.5;
                //flip y-coords to positive = up
                let y = -y;
                y
            };

        LocalCoords{x: local_x, y: local_y}
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// Checks pixel→world corner mapping for an axis-aligned orientation and
    /// for a rotated + translated one.
    #[test]
    fn world_coords_test() {
        //at origin, no rotation
        {
            let mut orientation = WorldRectangle::new(200.0, 100.0);
            orientation.set_rotation(0.0);
            orientation.set_translation(WorldCoords { x: 0.0, y: 0.0 });

            //top left corner
            {
                let pixel_coords = PixelCoords { x: 0.0, y: 0.0 };
                let world_coords = Photo::world_coords_impl(&orientation, pixel_coords);
                assert_eq!(world_coords, WorldCoords { x: -100.0, y: 50.0 });
            }

            //bottom right corner
            {
                let pixel_coords = PixelCoords { x: 200.0, y: 100.0 };
                let world_coords = Photo::world_coords_impl(&orientation, pixel_coords);
                assert_eq!(world_coords, WorldCoords { x: 100.0, y: -50.0 });
            }
        }

        //rotated + translated
        {
            let mut orientation = WorldRectangle::new(200.0, 100.0);
            // 90° turn: the expected corners below are the unrotated corners
            // rotated counter-clockwise, then offset by (x, y).
            orientation.set_rotation(90.0);
            let x = 2000.0;
            let y = 1000.0;
            orientation.set_translation(WorldCoords {x,y});

            //top left corner
            {
                let pixel_coords = PixelCoords { x: 0.0, y: 0.0 };
                let world_coords = Photo::world_coords_impl(&orientation, pixel_coords);
                assert_eq!(world_coords, WorldCoords { x: x - 50.0, y: y - 100.0 });
            }

            //bottom right corner
            {
                let pixel_coords = PixelCoords { x: 200.0, y: 100.0 };
                let world_coords = Photo::world_coords_impl(&orientation, pixel_coords);
                assert_eq!(world_coords, WorldCoords { x: x + 50.0, y: y + 100.0 });
            }
        }
    }
}
| true |
2d434de272daa220cf3cf98bbd35ad2d1b81d34e
|
Rust
|
joephon/PoloDB
|
/src/polodb_core/meta_doc_helper.rs
|
UTF-8
| 3,245 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
use polodb_bson::{Document, Value, mk_document};
use std::rc::Rc;
use crate::DbResult;
use crate::error::DbErr;
/// root_btree schema
/// {
/// _id: String,
/// name: String,
/// root_pid: Int,
/// flags: Int,
/// }
///
/// flags indicates:
/// key_ty: 1byte
/// ...
///
/// One collection entry in the root btree, wrapping its meta `Document`
/// (schema described in the comment above).
pub(crate) struct MetaDocEntry {
    // Cached copy of the doc's "name" field.
    name: String,
    doc: Rc<Document>,
}

/// Mask for the low byte of `flags`, which stores the primary-key type tag.
pub(crate) const KEY_TY_FLAG: u32 = 0b11111111;
impl MetaDocEntry {
    /// Creates a fresh entry with `flags` = 0 (no primary-key type recorded yet).
    pub fn new(id: u32, name: String, root_pid: u32) -> MetaDocEntry {
        let doc = mk_document! {
            "_id": id,
            "name": name.clone(),
            "root_pid": root_pid,
            "flags": 0,
        };
        MetaDocEntry {
            name,
            doc: Rc::new(doc),
        }
    }

    /// Wraps an existing meta document; panics if it lacks a "name" field.
    pub(crate) fn from_doc(doc: Rc<Document>) -> MetaDocEntry {
        let name = doc.get(meta_doc_key::NAME).unwrap().unwrap_string();
        MetaDocEntry {
            name: name.into(),
            doc,
        }
    }

    #[allow(dead_code)]
    pub(crate) fn name(&self) -> &str {
        self.name.as_str()
    }

    pub(crate) fn root_pid(&self) -> u32 {
        self.doc.get(meta_doc_key::ROOT_PID).unwrap().unwrap_int() as u32
    }

    // NOTE: Rc::get_mut().unwrap() panics unless `self.doc` is uniquely
    // owned at this point (same for set_flags/set_indexes below).
    pub(crate) fn set_root_pid(&mut self, new_root_pid: u32) {
        let doc_mut = Rc::get_mut(&mut self.doc).unwrap();
        doc_mut.insert(meta_doc_key::ROOT_PID.into(), Value::from(new_root_pid));
    }

    pub(crate) fn flags(&self) -> u32 {
        self.doc.get(meta_doc_key::FLAGS).unwrap().unwrap_int() as u32
    }

    pub(crate) fn set_flags(&mut self, flags: u32) {
        let doc_mut = Rc::get_mut(&mut self.doc).unwrap();
        doc_mut.insert(meta_doc_key::FLAGS.into(), Value::from(flags));
    }

    /// The recorded primary-key type tag (low byte of `flags`; 0 = unset).
    #[inline]
    fn key_ty(&self) -> u8 {
        (self.flags() & KEY_TY_FLAG) as u8
    }

    /// Verifies `primary_key`'s type matches the recorded key type.
    /// If no key type has been recorded yet, sets `*skipped` and succeeds.
    pub(crate) fn check_pkey_ty(&self, primary_key: &Value, skipped: &mut bool) -> DbResult<()> {
        let expected = self.key_ty();
        if expected == 0 {
            *skipped = true;
            return Ok(())
        }
        let actual_ty = primary_key.ty_int();
        if expected != actual_ty {
            return Err(DbErr::UnexpectedIdType(expected, actual_ty))
        }
        Ok(())
    }

    /// Records the pkey type of `value_doc`'s id into the flags' low byte.
    /// Panics if `value_doc` has no pkey id.
    pub(crate) fn merge_pkey_ty_to_meta(&mut self, value_doc: &Document) {
        let pkey_ty = value_doc.pkey_id().unwrap().ty_int();
        self.set_flags(self.flags() | ((pkey_ty as u32) & KEY_TY_FLAG));
    }

    #[inline]
    pub(crate) fn doc_ref(&self) -> &Document {
        self.doc.as_ref()
    }

    pub(crate) fn set_indexes(&mut self, indexes: Document) {
        let doc_mut = Rc::get_mut(&mut self.doc).unwrap();
        doc_mut.insert(meta_doc_key::INDEXES.into(), Value::from(indexes));
    }
}
/// Field-name constants for the collection meta document.
pub(crate) mod meta_doc_key {
    pub(crate) static ID: &str = "_id";
    pub(crate) static ROOT_PID: &str = "root_pid";
    pub(crate) static NAME: &str = "name";
    pub(crate) static FLAGS: &str = "flags";
    pub(crate) static INDEXES: &str = "indexes";

    /// Field names within each entry of the "indexes" sub-document.
    pub(crate) mod index {
        pub(crate) static NAME: &str = "name";
        pub(crate) static V: &str = "v";
        pub(crate) static UNIQUE: &str = "unique";
        pub(crate) static ROOT_PID: &str = "root_pid";
    }
}
| true |
a9723ebc52864227a7fde3e20647415f5ab8ffa0
|
Rust
|
davidpdrsn/oops-lang
|
/src/prep/mod.rs
|
UTF-8
| 9,760 | 2.703125 | 3 |
[] |
no_license
|
use crate::ast::{visit_ast, Ast, Visitor};
use crate::{
ast::{self, Ident},
error::{Error, Result},
interpret::{ClassVTable, VTable},
Span,
};
use std::{collections::HashMap, rc::Rc};
/// Class table: class name -> shared class record.
pub type Classes<'a> = VTable<'a, Rc<Class<'a>>>;

/// Builds the class table for `ast` on top of `built_in_classes`:
/// first registers every class declaration, then attaches methods.
pub fn find_classes_and_methods<'a>(
    ast: &'a Ast<'a>,
    built_in_classes: Classes<'a>,
) -> Result<'a, Classes<'a>> {
    let classes = find_classes(ast, built_in_classes)?;
    find_methods(ast, classes)
}
/// Pass 1: collect class declarations, then link each class to its
/// super class.
fn find_classes<'a>(ast: &'a Ast<'a>, built_in_classes: Classes<'a>) -> Result<'a, Classes<'a>> {
    let mut f = FindClasses {
        table: built_in_classes,
    };
    visit_ast(&mut f, ast)?;
    // Super classes can only be resolved once every class is registered.
    f.setup_super_classes()?;
    Ok(f.table)
}
/// AST visitor that registers class declarations into `table`.
struct FindClasses<'a> {
    table: Classes<'a>,
}

impl<'a> Visitor<'a> for FindClasses<'a> {
    type Error = Error<'a>;

    /// Registers one class declaration, rejecting duplicate names.
    fn visit_define_class(&mut self, node: &'a ast::DefineClass<'a>) -> Result<'a, ()> {
        let name = &node.name.class_name.0;
        let key = name.name;
        self.check_for_existing_class_with_same_name(key, node)?;

        let fields = self.make_fields(node);
        let super_class_name = &node.super_class.class_name.0;
        // super_class itself is linked later by setup_super_classes.
        let class = Class::new(name, super_class_name, fields, node.span);
        self.table.insert(key, Rc::new(class));
        Ok(())
    }
}
impl<'a> FindClasses<'a> {
    /// Errors if a class named `key` is already registered.
    fn check_for_existing_class_with_same_name(
        &self,
        key: &'a str,
        node: &'a ast::DefineClass<'a>,
    ) -> Result<'a, ()> {
        if let Some(other) = self.table.get(key) {
            Err(Error::ClassAlreadyDefined {
                class: &key,
                first_span: other.span,
                second_span: node.span,
            })
        } else {
            Ok(())
        }
    }

    /// Builds the field table from the class declaration's field list.
    fn make_fields(&self, node: &'a ast::DefineClass<'a>) -> VTable<'a, Field<'a>> {
        node.fields
            .iter()
            .map(|field| {
                let ident = &field.ident;
                let field = Field { name: ident };
                (ident.name, field)
            })
            .collect()
    }

    /// Pass 2: resolve each class's super class by name and link it.
    /// Done in two loops (collect into `acc`, then mutate) because linking
    /// needs reads and writes of `self.table` at the same time.
    fn setup_super_classes(&mut self) -> Result<'a, ()> {
        let mut acc = HashMap::new();

        for (class_name, class) in &self.table {
            let super_class_name = &class.super_class_name;

            // Object isn't supposed to have a super class
            if class_name == &"Object" {
                continue;
            }

            let super_class =
                self.table
                    .get(&super_class_name.name)
                    .ok_or_else(|| Error::ClassNotDefined {
                        class: super_class_name.name,
                        span: class.span,
                    })?;
            let super_class = Rc::clone(&super_class);
            acc.insert(*class_name, (super_class, class.span));
        }

        for (class_name, (super_class, span)) in acc {
            let mut class =
                self.table
                    .get_mut(class_name)
                    .ok_or_else(|| Error::ClassNotDefined {
                        class: class_name,
                        span,
                    })?;
            // Panics if some other Rc handle to this class is still live here.
            Rc::get_mut(&mut class)
                .expect("Internal error: Rc borrowed mut more than once")
                .super_class = Some(super_class);
        }

        Ok(())
    }
}
/// AST visitor that attaches method definitions to already-registered classes.
struct FindMethods<'a> {
    classes: Classes<'a>,
}

/// Pass 3: walk the AST attaching methods to the classes found earlier.
fn find_methods<'a>(ast: &'a Ast<'a>, classes: Classes<'a>) -> Result<'a, Classes<'a>> {
    let mut f = FindMethods { classes };
    visit_ast(&mut f, ast)?;
    Ok(f.classes)
}
impl<'a> Visitor<'a> for FindMethods<'a> {
    type Error = Error<'a>;

    /// Registers one method on its class, erroring if the class is unknown
    /// or the method name is already taken.
    fn visit_define_method(&mut self, node: &'a ast::DefineMethod<'a>) -> Result<'a, ()> {
        let method_name = &node.method_name.ident;
        let key = method_name.name;
        let class_name = &node.class_name.0.name;

        // Scoped block so the shared borrow of `self.classes` ends before
        // the mutable borrow below.
        {
            let class = self
                .classes
                .get(class_name)
                .ok_or_else(|| Error::ClassNotDefined {
                    class: class_name,
                    span: node.span,
                })?;
            self.check_for_existing_method_with_same_name(class, key, node)?;
        }

        let method = self.make_method(method_name, &node.block, node.span);

        let mut class = self
            .classes
            .get_mut(class_name)
            .ok_or_else(|| Error::ClassNotDefined {
                class: class_name,
                span: node.span,
            })?;
        // Panics if some other Rc handle to this class is still live here.
        let class = Rc::get_mut(&mut class)
            .expect("Internal error: FindMethods.classes borrowed mut more than once");
        class.methods.insert(key, method);
        Ok(())
    }
}
impl<'a> FindMethods<'a> {
fn check_for_existing_method_with_same_name(
&self,
class: &Class<'a>,
key: &'a str,
node: &'a ast::DefineMethod<'a>,
) -> Result<'a, ()> {
if let Some(other) = class.methods.get(key) {
return Err(Error::MethodAlreadyDefined {
class: class.name.name,
method: key,
first_span: other.span,
second_span: node.span,
});
} else {
Ok(())
}
}
fn make_method(
&self,
method_name: &'a Ident<'a>,
block: &'a ast::Block<'a>,
span: Span,
) -> Method<'a> {
Method {
name: method_name,
parameters: &block.parameters,
body: &block.body,
span,
}
}
}
/// A resolved class: declared fields, attached methods, and (after
/// `setup_super_classes`) a link to its super class.
#[derive(Debug)]
pub struct Class<'a> {
    pub name: &'a Ident<'a>,
    pub super_class_name: &'a Ident<'a>,
    // None until linked; Object stays None permanently.
    pub super_class: Option<Rc<Class<'a>>>,
    pub fields: VTable<'a, Field<'a>>,
    pub methods: VTable<'a, Method<'a>>,
    pub span: Span,
}
impl<'a> Class<'a> {
    /// Creates a class with no methods and an unresolved super class;
    /// both are filled in by later passes.
    fn new(
        name: &'a Ident<'a>,
        super_class_name: &'a Ident<'a>,
        fields: VTable<'a, Field<'a>>,
        span: Span,
    ) -> Self {
        Self {
            name,
            fields,
            super_class_name,
            super_class: None,
            methods: VTable::new(),
            span,
        }
    }

    /// Looks up a method on this class, walking up the super-class chain;
    /// errors with `UndefinedMethod` when the chain is exhausted.
    pub fn get_method_named(
        &self,
        method_name: &'a str,
        call_site: Span,
    ) -> Result<'a, &Method<'a>> {
        let method = self.methods.get(method_name);
        if let Some(method) = method {
            return Ok(method);
        }
        if let Some(super_class) = &self.super_class {
            // TODO: Change method name of returned error
            // Otherwise it'll always be "Object"
            return super_class.get_method_named(method_name, call_site);
        }
        Err(Error::UndefinedMethod {
            class: &self.name.name,
            method: method_name,
            span: call_site,
        })
    }
}
/// A class field declaration (name only).
#[derive(Debug, Eq, PartialEq, Hash)]
pub struct Field<'a> {
    pub name: &'a Ident<'a>,
}

/// A method captured from the AST: its parameter list and body are
/// borrowed from the parsed block.
#[derive(Debug)]
pub struct Method<'a> {
    pub name: &'a Ident<'a>,
    pub parameters: &'a Vec<ast::Parameter<'a>>,
    pub body: &'a Vec<ast::Stmt<'a>>,
    pub span: Span,
}
// TODO: Bring back
// #[cfg(test)]
// mod test {
// #[allow(unused_imports)]
// use super::*;
// use crate::{lex::lex, parse::parse};
// #[test]
// fn finds_classes_and_methods() {
// let program = r#"
// [User def: #foo do: || { return 123; }];
// [Class subclass name: #User fields: [#id]];
// "#;
// let tokens = lex(&program).unwrap();
// let ast = parse(&tokens).unwrap();
// let classes = find_classes_and_methods(&ast).unwrap();
// let class = classes.get("User").unwrap();
// assert_eq!("User", class.name.name);
// assert_eq!(
// vec!["id"],
// class
// .fields
// .values()
// .map(|v| v.name.name)
// .collect::<Vec<_>>()
// );
// assert_eq!(vec![&"id"], class.fields.keys().collect::<Vec<_>>());
// assert_eq!(
// vec!["foo"],
// class
// .methods
// .values()
// .map(|v| v.name.name)
// .collect::<Vec<_>>()
// );
// assert_eq!(vec![&"foo"], class.methods.keys().collect::<Vec<_>>());
// }
// #[test]
// fn errors_if_class_is_defined_twice() {
// let program = r#"
// [Class subclass name: #User fields: [#foo]];
// [Class subclass name: #User fields: [#bar]];
// "#;
// let tokens = lex(&program).unwrap();
// let ast = parse(&tokens).unwrap();
// let result = find_classes_and_methods(&ast);
// assert_error!(result, Error::ClassAlreadyDefined { .. });
// }
// #[test]
// fn errors_if_method_is_defined_twice() {
// let program = r#"
// [Class subclass name: #User fields: [#foo]];
// [User def: #foo do: || { return 1; }];
// [User def: #foo do: || { return 2; }];
// "#;
// let tokens = lex(&program).unwrap();
// let ast = parse(&tokens).unwrap();
// let result = find_classes_and_methods(&ast);
// assert_error!(result, Error::MethodAlreadyDefined { .. });
// }
// #[test]
// fn errors_if_you_define_methods_on_classes_that_dont_exist() {
// let program = r#"
// [User def: #foo do: || { return 1; }];
// "#;
// let tokens = lex(&program).unwrap();
// let ast = parse(&tokens).unwrap();
// let result = find_classes_and_methods(&ast);
// assert_error!(result, Error::ClassNotDefined { .. });
// }
// }
| true |
611cb8859515989e081216c117599e4bbeb2b45f
|
Rust
|
Alligator/advent-of-code-2019
|
/day-3/src/main.rs
|
UTF-8
| 4,923 | 3.390625 | 3 |
[] |
no_license
|
use std::fs;
use std::collections::{HashSet, HashMap};
type Point = (i64, i64);

/// Walks a wire path (e.g. `["R8", "U5"]`) from the origin and returns:
/// * the set of every grid point the wire touches (including the origin), and
/// * for each point, the number of steps taken to reach it the FIRST time.
///
/// Coordinates are screen-style: `U` decreases y, `D` increases it.
/// Panics if a segment's count is not a valid integer.
fn get_points(path: &[&str]) -> (HashSet<Point>, HashMap<Point, i64>) {
    let mut current_point: Point = (0, 0);
    let mut points = HashSet::<Point>::new();
    let mut steps = HashMap::new();
    let mut total_steps = 0;
    points.insert(current_point);
    for item in path {
        let count = item[1..].parse::<i64>().unwrap();
        // Direction letter -> unit step per move.
        let (x_diff, y_diff) = match item.chars().next() {
            Some('R') => (1, 0),
            Some('L') => (-1, 0),
            Some('U') => (0, -1),
            Some('D') => (0, 1),
            _ => (0, 0),
        };
        // i == 0 revisits the segment's start point; `entry` keeps only the
        // first-visit step count, and the duplicate step is subtracted below.
        for i in 0..=count {
            let point = (current_point.0 + i * x_diff, current_point.1 + i * y_diff);
            points.insert(point);
            steps.entry(point).or_insert(total_steps);
            total_steps += 1;
        }
        // take off the extra step we counted
        total_steps -= 1;
        current_point = (current_point.0 + count * x_diff, current_point.1 + count * y_diff);
    }
    (points, steps)
}
/// Part 1: Manhattan distance from the origin to the nearest point where
/// the two wires cross (the origin itself doesn't count).
/// Panics if the wires never cross.
fn get_closest_distance(path1: &[&str], path2: &[&str]) -> i64 {
    let (points1, _steps1) = get_points(path1);
    let (points2, _steps2) = get_points(path2);
    points1
        .intersection(&points2)
        .filter(|p| **p != (0, 0))
        .map(|p| p.0.abs() + p.1.abs())
        .min()
        .unwrap()
}
/// Part 2: the smallest combined step count at which the two wires reach a
/// common point (the origin excluded). Panics if the wires never cross.
fn get_closest_steps(path1: &[&str], path2: &[&str]) -> i64 {
    let (points1, steps1) = get_points(path1);
    let (points2, steps2) = get_points(path2);
    points1
        .intersection(&points2)
        .filter(|&&p| p != (0, 0))
        // Every intersection point is present in both step maps.
        .map(|p| steps1[p] + steps2[p])
        .min()
        .unwrap()
}
/// Reads the two wire paths from `input.txt` (one comma-separated path per
/// line) and prints the part 1 and part 2 answers.
fn main() {
    let src = fs::read_to_string("input.txt").unwrap();
    let mut lines = src
        .split_whitespace()
        .map(|x| x.split(",").collect());
    let path1: Vec<&str> = lines.next().unwrap();
    let path2: Vec<&str> = lines.next().unwrap();

    // part 1: closest crossing by Manhattan distance
    let point = get_closest_distance(&path1, &path2);
    println!("part1: {}", point);

    // part 2: crossing with the fewest combined steps
    let steps = get_closest_steps(&path1, &path2);
    println!("part2: {}", steps);
}
// Tests use the worked examples from the puzzle text, plus one case where
// the wires cross at the origin (which must be ignored).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_points() {
        let path = ["R8","U5","L5","D3"];
        let (points, _steps) = get_points(&path);
        let mut expected_points = HashSet::new();
        // R8
        expected_points.insert((0, 0));
        expected_points.insert((1, 0));
        expected_points.insert((2, 0));
        expected_points.insert((3, 0));
        expected_points.insert((4, 0));
        expected_points.insert((5, 0));
        expected_points.insert((6, 0));
        expected_points.insert((7, 0));
        expected_points.insert((8, 0));
        // U5 (up = negative y)
        expected_points.insert((8, -1));
        expected_points.insert((8, -2));
        expected_points.insert((8, -3));
        expected_points.insert((8, -4));
        expected_points.insert((8, -5));
        // L5
        expected_points.insert((7, -5));
        expected_points.insert((6, -5));
        expected_points.insert((5, -5));
        expected_points.insert((4, -5));
        expected_points.insert((3, -5));
        // D3
        expected_points.insert((3, -4));
        expected_points.insert((3, -3));
        expected_points.insert((3, -2));
        assert_eq!(points, expected_points);
    }

    #[test]
    fn test_get_closest_point_1() {
        let path1 = ["R8","U5","L5","D3"];
        let path2 = ["U7","R6","D4","L4"];
        assert_eq!(get_closest_distance(&path1, &path2), 6);
    }

    #[test]
    fn test_get_closest_point_2() {
        let path1 = ["R75","D30","R83","U83","L12","D49","R71","U7","L72"];
        let path2 = ["U62","R66","U55","R34","D71","R55","D58","R83"];
        assert_eq!(get_closest_distance(&path1, &path2), 159);
    }

    #[test]
    fn test_get_closest_point_3() {
        let path1 = ["R98","U47","R26","D63","R33","U87","L62","D20","R33","U53","R51"];
        let path2 = ["U98","R91","D20","R16","D67","R40","U7","R15","U6","R7"];
        assert_eq!(get_closest_distance(&path1, &path2), 135);
    }

    // The origin crossing must not be reported as the answer.
    #[test]
    fn test_get_closest_point_cross_at_zero() {
        let path1 = ["R10"];
        let path2 = ["U5", "R5", "D5"];
        assert_eq!(get_closest_distance(&path1, &path2), 5);
    }

    #[test]
    fn test_get_closest_steps() {
        let path1 = ["R8","U5","L5","D3"];
        let path2 = ["U7","R6","D4","L4"];
        assert_eq!(get_closest_steps(&path1, &path2), 30);
    }
}
| true |
067ed15e7ce5e7faef7377ca7365d5e219952858
|
Rust
|
tatamiya/actix-web-tutorial
|
/application/src/main.rs
|
UTF-8
| 1,897 | 2.890625 | 3 |
[] |
no_license
|
use actix_web::{get, web, App, HttpServer, HttpResponse, Responder};
use std::sync::Mutex;
/// Plain handler returning a static greeting; routed in `main` under
/// `/app/index.html`.
async fn index() -> impl Responder {
    "Hello world!"
}
/// Read-only application state registered via `App::data` in `main`.
struct AppState {
    app_name: String,
}

/// GET "/": greets using the app name from shared `AppState`.
#[get("/")]
async fn state(data: web::Data<AppState>) -> String {
    let app_name = &data.app_name;
    format!("Hello {}!", app_name)
}
/// Mutable shared state: a request counter guarded by a `Mutex`.
struct AppStateWithCounter {
    counter: Mutex<i32>,
}

/// Increments the shared counter and reports the new value.
async fn shared_mutable_state(data: web::Data<AppStateWithCounter>) -> String {
    // unwrap: panics only if another thread panicked while holding the lock
    let mut counter = data.counter.lock().unwrap();
    *counter += 1;
    format!("Request number: {}", counter)
}
/// Routes for the `/api` scope (see `main`): GET /test returns "test",
/// HEAD /test answers 405.
fn scoped_config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("/test")
            .route(web::get().to(|| HttpResponse::Ok().body("test")))
            .route(web::head().to(|| HttpResponse::MethodNotAllowed())),
    );
}
/// Top-level routes added via `App::configure`: GET /hoge returns "hoge",
/// HEAD /hoge answers 405.
fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("/hoge")
            .route(web::get().to(|| HttpResponse::Ok().body("hoge")))
            .route(web::head().to(|| HttpResponse::MethodNotAllowed())),
    );
}
/// Wires all the example handlers into one server on 127.0.0.1:8080.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Created once, OUTSIDE the factory closure, then cloned into each app
    // instance — so all workers share the same counter.
    let counter = web::Data::new(AppStateWithCounter {
        counter: Mutex::new(0),
    });

    HttpServer::new(move || {
        App::new()
            // NOTE(review): constructed inside the factory closure, so this
            // AppState is rebuilt per app instance — confirm that's intended.
            .data(AppState {
                app_name: String::from("Actix-web"),
            })
            .service(
                web::scope("/app")
                    .route("/index.html", web::get().to(index)),
            )
            .service(state)
            .app_data(counter.clone())
            .route("/shared_mutable_state.html", web::get().to(shared_mutable_state))
            .configure(config)
            .service(web::scope("/api").configure(scoped_config))
            .route("/", web::get().to(|| HttpResponse::Ok().body("/")))
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
| true |
6e2ddde408de01c3b7658bb7f51fd7d02461260d
|
Rust
|
greendwin/rust_ray
|
/src/world/scene.rs
|
UTF-8
| 1,026 | 2.953125 | 3 |
[] |
no_license
|
use crate::math::*;
/// A spherical light source: position, radius, and a position-dependent color.
pub trait LightDecl {
    fn orig(&self) -> Vec3;
    fn radius(&self) -> f64;
    /// Color contribution of this light as seen from point `pt`.
    fn color_at(&self, pt: Vec3) -> Vec3;
}
/// A renderable scene: a slice of hittable objects plus a slice of lights.
pub trait Scene {
    type Mat: Material;
    type Obj: HitRay<Self::Mat>;
    type Light: LightDecl;

    fn objs(&self) -> &[Self::Obj];
    fn lights(&self) -> &[Self::Light];
}
/// Any `Scene` is itself hittable: a ray hit is the closest hit over all
/// of the scene's objects.
impl<Scn> HitRay<Scn::Mat> for Scn
where
    Scn: Scene,
{
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<(Hit, <Self as Scene>::Mat)> {
        let mut closest_hit = None;
        let mut cur_t_max = t_max;

        // Shrink the search interval to the nearest hit found so far, so
        // later objects only register if they are closer.
        for obj in self.objs() {
            if let Some((hit, mat)) = obj.hit(ray, t_min, cur_t_max) {
                cur_t_max = hit.t;
                closest_hit.replace((hit, mat));
            }
        }

        // Lights are not hit-tested yet; this sketch is intentionally disabled.
        // for lgt in self.lights() {
        //     if let Some((hit, mat)) = lgt.hit(ray, t_min, cur_t_max) {
        //         cur_t_max = hit.t;
        //         closest_hit.replace((hit, mat));
        //     }
        // }

        closest_hit
    }
}
| true |
88b5dc70fbe177e0ddcaf20e7e09dbffb4d5971f
|
Rust
|
KanchiShimono/rust-rocket-juniper-graphql-example
|
/src/db/repository.rs
|
UTF-8
| 3,012 | 2.96875 | 3 |
[] |
no_license
|
use crate::db::{
models::{Person, PersonWithPosts, Post},
Db,
};
use crate::graphql::schema::{CreatePersonInput, CreatePostInput};
use chrono::Utc;
use diesel::result::Error;
use uuid::Uuid;
/// CRUD operations on `Person` rows.
pub trait PersonRepository {
    fn find_all(&self) -> Result<Vec<Person>, Error>;
    fn find_by_id(&self, id: Uuid) -> Result<Person, Error>;
    fn save(&self, input: CreatePersonInput) -> Result<Person, Error>;
    /// Deletes the row and returns the deleted record.
    fn delete(&self, id: Uuid) -> Result<Person, Error>;
}

/// CRUD operations on `Post` rows.
pub trait PostRepository {
    fn find_all(&self) -> Result<Vec<Post>, Error>;
    fn find_by_id(&self, id: Uuid) -> Result<Post, Error>;
    fn find_by_person_id(&self, person_id: Uuid) -> Result<Vec<Post>, Error>;
    fn save(&self, input: CreatePostInput) -> Result<Post, Error>;
    /// Deletes the row and returns the deleted record.
    fn delete(&self, id: Uuid) -> Result<Post, Error>;
}

/// Read-only queries for persons joined with their posts.
pub trait PersonWithPostsRepository {
    fn find_all(&self) -> Result<Vec<PersonWithPosts>, Error>;
    fn find_by_id(&self, id: Uuid) -> Result<PersonWithPosts, Error>;
}
/// Postgres-backed `PersonRepository` delegating to the `Person` model.
pub struct PgPersonRepository {
    pub conn: Db,
}

impl PersonRepository for PgPersonRepository {
    fn find_all(&self) -> Result<Vec<Person>, Error> {
        Person::find_all(&self.conn)
    }

    fn find_by_id(&self, id: Uuid) -> Result<Person, Error> {
        Person::find_by_id(&self.conn, id)
    }

    /// Generates the id (UUID v4) and both timestamps (UTC, naive) here
    /// rather than in the database.
    fn save(&self, input: CreatePersonInput) -> Result<Person, Error> {
        let now = Utc::now().naive_utc();
        let new_person = Person {
            id: Uuid::new_v4(),
            name: input.name,
            create_at: now,
            update_at: now,
        };
        Person::save(&self.conn, new_person)
    }

    fn delete(&self, id: Uuid) -> Result<Person, Error> {
        Person::delete(&self.conn, id)
    }
}
/// Postgres-backed `PostRepository` delegating to the `Post` model.
pub struct PgPostRepository {
    pub conn: Db,
}

impl PostRepository for PgPostRepository {
    fn find_all(&self) -> Result<Vec<Post>, Error> {
        Post::find_all(&self.conn)
    }

    fn find_by_id(&self, id: Uuid) -> Result<Post, Error> {
        Post::find_by_id(&self.conn, id)
    }

    fn find_by_person_id(&self, person_id: Uuid) -> Result<Vec<Post>, Error> {
        Post::find_by_person_id(&self.conn, person_id)
    }

    /// Generates the id (UUID v4) and both timestamps (UTC, naive) here
    /// rather than in the database.
    fn save(&self, input: CreatePostInput) -> Result<Post, Error> {
        let now = Utc::now().naive_utc();
        let new_post = Post {
            id: Uuid::new_v4(),
            person_id: input.person_id,
            text: input.text,
            create_at: now,
            update_at: now,
        };
        Post::save(&self.conn, new_post)
    }

    fn delete(&self, id: Uuid) -> Result<Post, Error> {
        Post::delete(&self.conn, id)
    }
}
/// Postgres-backed read-only repository for persons with their posts.
pub struct PgPersonWithPostsRepository {
    pub conn: Db,
}

impl PersonWithPostsRepository for PgPersonWithPostsRepository {
    fn find_all(&self) -> Result<Vec<PersonWithPosts>, Error> {
        PersonWithPosts::find_all(&self.conn)
    }

    fn find_by_id(&self, id: Uuid) -> Result<PersonWithPosts, Error> {
        PersonWithPosts::find_by_id(&self.conn, id)
    }
}
| true |
064df29d49d4b6c65284677b2f90fad30d23de45
|
Rust
|
lovoror/ruffle
|
/core/src/avm1/globals/color.rs
|
UTF-8
| 7,142 | 2.765625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
//! Color object
//!
//! TODO: This should change when `ColorTransform` changes to match Flash's representation
//! (See GitHub #193)
use crate::avm1::property::Attribute::*;
use crate::avm1::return_value::ReturnValue;
use crate::avm1::{Avm1, Error, Object, ScriptObject, TObject, UpdateContext, Value};
use crate::display_object::{DisplayObject, TDisplayObject};
use enumset::EnumSet;
use gc_arena::MutationContext;
/// `Color(target)` constructor: stores the target path on the object as a
/// read-only, non-enumerable `target` property.
pub fn constructor<'gc>(
    avm: &mut Avm1<'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    mut this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<ReturnValue<'gc>, Error> {
    // The target display object that this color will modify.
    let target = args.get(0).cloned().unwrap_or(Value::Undefined);
    // Set undocumented `target` property
    this.set("target", target, avm, context)?;
    this.set_attributes(
        context.gc_context,
        Some("target"),
        DontDelete | ReadOnly | DontEnum,
        EnumSet::empty(),
    );
    Ok(Value::Undefined.into())
}
/// Builds `Color.prototype`, registering getRGB / getTransform / setRGB /
/// setTransform as non-enumerable, read-only methods.
pub fn create_proto<'gc>(
    gc_context: MutationContext<'gc, '_>,
    proto: Object<'gc>,
    fn_proto: Object<'gc>,
) -> Object<'gc> {
    let mut object = ScriptObject::object(gc_context, Some(proto));

    object.force_set_function(
        "getRGB",
        get_rgb,
        gc_context,
        DontDelete | ReadOnly | DontEnum,
        Some(fn_proto),
    );

    object.force_set_function(
        "getTransform",
        get_transform,
        gc_context,
        DontDelete | ReadOnly | DontEnum,
        Some(fn_proto),
    );

    object.force_set_function(
        "setRGB",
        set_rgb,
        gc_context,
        DontDelete | ReadOnly | DontEnum,
        Some(fn_proto),
    );

    object.force_set_function(
        "setTransform",
        set_transform,
        gc_context,
        DontDelete | ReadOnly | DontEnum,
        Some(fn_proto),
    );

    object.into()
}
/// Gets the target display object of this color transform.
///
/// Resolves the stored `target` path relative to the caller's active clip,
/// so the same Color object can affect different clips per call site.
fn target<'gc>(
    avm: &mut Avm1<'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
) -> Result<Option<DisplayObject<'gc>>, Error> {
    // The target path resolves based on the active tellTarget clip of the stack frame.
    // This means calls on the same `Color` object could set the color of different clips
    // depending on which timeline its called from!
    let target = this.get("target", avm, context)?.resolve(avm, context)?;
    let start_clip = avm.target_clip_or_root(context);
    avm.resolve_target_display_object(context, start_clip, target)
}
/// `Color.getRGB()`: packs the additive channel offsets into 0xRRGGBB.
/// Returns undefined when the target can't be resolved.
fn get_rgb<'gc>(
    avm: &mut Avm1<'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<ReturnValue<'gc>, Error> {
    if let Some(target) = target(avm, context, this)? {
        let color_transform = target.color_transform();
        // Additive offsets are stored normalized (0..1); scale to 0..255
        // and pack into one integer.
        let r = ((color_transform.r_add * 255.0) as i32) << 16;
        let g = ((color_transform.g_add * 255.0) as i32) << 8;
        let b = (color_transform.b_add * 255.0) as i32;
        Ok((r | g | b).into())
    } else {
        Ok(Value::Undefined.into())
    }
}
/// `Color.getTransform()`: returns an object with multiplicative terms
/// (`ra`/`ga`/`ba`/`aa`, as percentages) and additive terms
/// (`rb`/`gb`/`bb`/`ab`, scaled to 0..255).
/// Returns undefined when the target can't be resolved.
fn get_transform<'gc>(
    avm: &mut Avm1<'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<ReturnValue<'gc>, Error> {
    if let Some(target) = target(avm, context, this)? {
        let color_transform = target.color_transform();
        let out = ScriptObject::object(context.gc_context, Some(avm.prototypes.object));
        out.set("ra", (color_transform.r_mult * 100.0).into(), avm, context)?;
        out.set("ga", (color_transform.g_mult * 100.0).into(), avm, context)?;
        out.set("ba", (color_transform.b_mult * 100.0).into(), avm, context)?;
        out.set("aa", (color_transform.a_mult * 100.0).into(), avm, context)?;
        out.set("rb", (color_transform.r_add * 255.0).into(), avm, context)?;
        out.set("gb", (color_transform.g_add * 255.0).into(), avm, context)?;
        out.set("bb", (color_transform.b_add * 255.0).into(), avm, context)?;
        out.set("ab", (color_transform.a_add * 255.0).into(), avm, context)?;
        Ok(out.into())
    } else {
        Ok(Value::Undefined.into())
    }
}
/// `Color.setRGB(0xRRGGBB)`: replaces the target's color with a flat color —
/// multiplicative terms zeroed, additive terms set from the packed RGB.
/// Silently does nothing when the target can't be resolved.
fn set_rgb<'gc>(
    avm: &mut Avm1<'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<ReturnValue<'gc>, Error> {
    if let Some(target) = target(avm, context, this)? {
        let mut color_transform = target.color_transform_mut(context.gc_context);
        let rgb = args
            .get(0)
            .unwrap_or(&Value::Undefined)
            .as_number(avm, context)? as i32;
        // Unpack 0xRRGGBB and normalize each channel to 0..1.
        let r = (((rgb >> 16) & 0xff) as f32) / 255.0;
        let g = (((rgb >> 8) & 0xff) as f32) / 255.0;
        let b = ((rgb & 0xff) as f32) / 255.0;

        color_transform.r_mult = 0.0;
        color_transform.g_mult = 0.0;
        color_transform.b_mult = 0.0;
        color_transform.r_add = r;
        color_transform.g_add = g;
        color_transform.b_add = b;
    }
    Ok(Value::Undefined.into())
}
/// `Color.setTransform(obj)`: reads `ra`/`ga`/`ba`/`aa` (percentages,
/// divided by 100) and `rb`/`gb`/`bb`/`ab` (0..255, divided by 255) from
/// the argument object into the target's color transform. Does nothing if
/// the argument isn't an object or the target can't be resolved.
fn set_transform<'gc>(
    avm: &mut Avm1<'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<ReturnValue<'gc>, Error> {
    if let Some(target) = target(avm, context, this)? {
        let mut color_transform = target.color_transform_mut(context.gc_context);
        if let Ok(transform) = args.get(0).unwrap_or(&Value::Undefined).as_object() {
            // Multiplicative terms: percent -> 0..1
            color_transform.r_mult = transform
                .get("ra", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 100.0;
            color_transform.g_mult = transform
                .get("ga", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 100.0;
            color_transform.b_mult = transform
                .get("ba", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 100.0;
            color_transform.a_mult = transform
                .get("aa", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 100.0;
            // Additive terms: 0..255 -> 0..1
            color_transform.r_add = transform
                .get("rb", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 255.0;
            color_transform.g_add = transform
                .get("gb", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 255.0;
            color_transform.b_add = transform
                .get("bb", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 255.0;
            color_transform.a_add = transform
                .get("ab", avm, context)?
                .resolve(avm, context)?
                .as_number(avm, context)? as f32
                / 255.0;
        }
    }
    Ok(Value::Undefined.into())
}
| true |
1ff027f567eab8468cc0b753954b9ce6074aa5e1
|
Rust
|
oli-obk/regex
|
/src/input.rs
|
UTF-8
| 3,213 | 3.0625 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-other-permissive",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ops;
use char::Char;
use prefix::Prefix;
/// Represents a location in the input.
#[derive(Clone, Copy, Debug)]
pub struct InputAt {
    // Byte offset of this position in the input.
    pos: usize,
    // Character at this position (may encode "absent" at the boundaries).
    c: Char,
    // UTF-8 width in bytes of `c`.
    len: usize,
}
impl InputAt {
    /// Returns true iff this position is at the beginning of the input.
    pub fn is_beginning(&self) -> bool {
        self.pos == 0
    }

    /// Returns the character at this position.
    ///
    /// If this position is just before or after the input, then an absent
    /// character is returned.
    pub fn char(&self) -> Char {
        self.c
    }

    /// Returns the UTF-8 width of the character at this position.
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns the byte offset of this position.
    pub fn pos(&self) -> usize {
        self.pos
    }

    /// Returns the byte offset of the next position in the input
    /// (i.e. this position advanced by the current character's width).
    pub fn next_pos(&self) -> usize {
        self.pos + self.len
    }
}
/// An abstraction over input used in the matching engines.
/// An abstraction over input used in the matching engines.
pub trait Input {
    /// Return an encoding of the position at byte offset `i`.
    fn at(&self, i: usize) -> InputAt;
    /// Return an encoding of the char position just prior to byte offset `i`.
    fn previous_at(&self, i: usize) -> InputAt;
    /// Scan the input for a matching prefix starting at `at`; returns the
    /// position of the first match, or None if no prefix matches.
    fn prefix_at(&self, prefixes: &Prefix, at: InputAt) -> Option<InputAt>;
}
/// An input reader over characters.
///
/// (This is the only implementation of `Input` at the moment.)
#[derive(Debug)]
pub struct CharInput<'t>(&'t str); // newtype over the haystack string
impl<'t> CharInput<'t> {
    /// Return a new character input reader for the given string.
    pub fn new(s: &'t str) -> CharInput<'t> {
        CharInput(s)
    }
}
// Deref to the underlying &str so slicing syntax (self[i..]) works directly.
impl<'t> ops::Deref for CharInput<'t> {
    type Target = str;

    fn deref(&self) -> &str {
        self.0
    }
}
impl<'t> Input for CharInput<'t> {
    // This `inline(always)` increases throughput by almost 25% on the `hard`
    // benchmarks over a normal `inline` annotation.
    //
    // I'm not sure why `#[inline]` isn't enough to convince LLVM, but it is
    // used *a lot* in the guts of the matching engines.
    #[inline(always)]
    fn at(&self, i: usize) -> InputAt {
        // Option<char> -> Char via Into; yields the "absent" char at EOF.
        let c = self[i..].chars().next().into();
        InputAt {
            pos: i,
            c: c,
            len: c.len_utf8(),
        }
    }

    fn previous_at(&self, i: usize) -> InputAt {
        // Walk one char backwards from byte offset `i`.
        let c: Char = self[..i].chars().rev().next().into();
        let len = c.len_utf8();
        InputAt {
            pos: i - len,
            c: c,
            len: len,
        }
    }

    fn prefix_at(&self, prefixes: &Prefix, at: InputAt) -> Option<InputAt> {
        // `find` returns byte offsets relative to the suffix; rebase onto `at`.
        prefixes.find(&self[at.pos()..]).map(|(s, _)| self.at(at.pos() + s))
    }
}
| true |
920e392cc3bce508f48a5c9d863fe40c23fa0cd6
|
Rust
|
mrsekut/rytl
|
/src/lexer/lexer.rs
|
UTF-8
| 3,770 | 3.40625 | 3 |
[] |
no_license
|
use crate::lexer::{LexerError, Loc, Token};
// fn recognize_many(input: &[u8], mut pos: usize, mut f: impl FnMut(u8) -> bool) -> usize {
// while pos < input.len() && f(input[pos]) {
// println!("pos: {:?}", pos);
// println!("len: {:?}", input.len());
// pos += 1;
// }
// pos
// }
/// Tokenizes `input` into numbers, lowercase identifiers, arithmetic
/// operators, parentheses and the bind operator `:=`.
///
/// Whitespace (space/newline/tab) is skipped; any other byte yields
/// `LexerError::invalid_char` with its location.
pub fn lexer(input: &str) -> Result<Vec<Token>, LexerError> {
    use std::str::from_utf8;

    // Advance `pos` while `f` accepts the current byte; returns the new pos.
    fn recognize_many(input: &[u8], mut pos: usize, f: impl Fn(u8) -> bool) -> usize {
        while pos < input.len() && f(input[pos]) {
            pos += 1;
        }
        pos
    }

    let mut tokens = Vec::new();
    let input = input.as_bytes();
    let mut pos = 0;

    // Emit a single-byte token and advance past it.
    macro_rules! lex_a_token {
        ($token_method:ident, $pos:ident) => {{
            tokens.push(Token::$token_method(Loc(pos, pos + 1)));
            pos = $pos + 1;
        }};
    }
    while pos < input.len() {
        match input[pos] {
            b'0'..=b'9' => {
                let start = pos;
                pos = recognize_many(input, pos, |b| b.is_ascii_digit());
                let n = from_utf8(&input[start..pos]).unwrap().parse().unwrap();
                tokens.push(Token::number(n, Loc(start, pos)));
            }
            b'a'..=b'z' => {
                let start = pos;
                pos = recognize_many(input, pos, |b| b.is_ascii_lowercase());
                let s = from_utf8(&input[start..pos]).unwrap();
                tokens.push(Token::var(s, Loc(start, pos)));
            }
            b'+' => lex_a_token!(plus, pos),
            b'-' => lex_a_token!(minus, pos),
            b'*' => lex_a_token!(asterisk, pos),
            b'/' => lex_a_token!(slash, pos),
            b'(' => lex_a_token!(lparen, pos),
            b')' => lex_a_token!(rparen, pos),
            b':' => {
                // FIX: the old loop consumed any run of ':' and '=' bytes
                // (so "::", "==", ":=:=" all lexed as one bind token).
                // A bind token is exactly the two-byte sequence ":=".
                let start = pos;
                if pos + 1 < input.len() && input[pos + 1] == b'=' {
                    pos += 2;
                    tokens.push(Token::bind(Loc(start, pos)));
                } else {
                    return Err(LexerError::invalid_char(':', Loc(pos, pos + 1)));
                }
            }
            b' ' | b'\n' | b'\t' => {
                // Skip whitespace without emitting a token.
                pos += 1;
            }
            b => return Err(LexerError::invalid_char(b as char, Loc(pos, pos + 1))),
        }
    }
    Ok(tokens)
}
#[test]
fn test_lexer() {
    // Covers operators, parentheses and multi-digit numbers; locations are
    // byte offsets into the source string.
    assert_eq!(
        lexer("12 + (3 - 123) * 3 / 4"),
        Ok(vec![
            Token::number(12, Loc(0, 2)),
            Token::plus(Loc(3, 4)),
            Token::lparen(Loc(5, 6)),
            Token::number(3, Loc(6, 7)),
            Token::minus(Loc(8, 9)),
            Token::number(123, Loc(10, 13)),
            Token::rparen(Loc(13, 14)),
            Token::asterisk(Loc(15, 16)),
            Token::number(3, Loc(17, 18)),
            Token::slash(Loc(19, 20)),
            Token::number(4, Loc(21, 22)),
        ])
    )
}
#[test]
fn test_bind_lexer() {
    // The bind operator ":=" spans two bytes (offsets 5..7 here).
    assert_eq!(
        lexer("hoge := 42"),
        Ok(vec![
            Token::var("hoge", Loc(0, 4)),
            Token::bind(Loc(5, 7)),
            Token::number(42, Loc(8, 10)),
        ])
    )
}
| true |
d895644361ccf54127a89cc741d41f70e7bd15c7
|
Rust
|
18616378431/myCode
|
/rust/test8-39/src/main.rs
|
UTF-8
| 458 | 3.34375 | 3 |
[] |
no_license
|
/// String-handling exercise: sums the main diagonal and the anti-diagonal of
/// a 4x4 digit square and checks the total, (1+6+7+1) + (4+7+8+4) == 38.
fn main() {
    let grid = r"1234
5678
9876
4321";

    let (mut main_diag, mut anti_diag) = (0u32, 0u32);
    for (row, raw_line) in grid.lines().enumerate() {
        let line = raw_line.trim();
        // Parse the single digit found at a given column of this row.
        let digit_at =
            |col: usize| line.chars().nth(col).and_then(|c| c.to_digit(10)).unwrap();
        main_diag += digit_at(row);
        anti_diag += digit_at(3 - row);
    }
    assert_eq!(38, main_diag + anti_diag);
}
| true |
2e70401732620bdd078944c51688846ee5ce0c5c
|
Rust
|
mattmahn/rosetta-code
|
/tasks/cum-stdev/rust/src/main.rs
|
UTF-8
| 673 | 3.6875 | 4 |
[
"Unlicense"
] |
permissive
|
/// Incrementally computes the population standard deviation of a stream of
/// samples using running sums (Σx and Σx²), so each update is O(1).
#[derive(Debug, Default, Clone)]
pub struct CumulativeStandardDeviation {
    /// Number of samples pushed so far.
    n: f64,
    /// Running sum of the samples.
    sum: f64,
    /// Running sum of the squared samples.
    sum_sq: f64
}

impl CumulativeStandardDeviation {
    /// Creates an accumulator with no samples.
    pub fn new() -> Self {
        Self::default()
    }

    /// Adds `x` to the stream and returns the population standard deviation
    /// of everything seen so far: sqrt(E[x²] − E[x]²).
    ///
    /// FIX: made `pub` — `new` was public but `push` was private, so the
    /// accumulator was unusable outside this module. Backward-compatible
    /// for existing in-module callers.
    ///
    /// NOTE(review): the Σx² formula can lose precision for large,
    /// tightly-clustered samples; Welford's algorithm is the numerically
    /// stable alternative if that ever matters here.
    pub fn push(&mut self, x: f64) -> f64 {
        self.n += 1.;
        self.sum += x;
        self.sum_sq += x * x;
        (self.sum_sq / self.n - self.sum * self.sum / self.n / self.n).sqrt()
    }
}
/// Prints the cumulative population standard deviation after each sample of
/// the classic {2,4,4,4,5,5,7,9} example (final line is 2).
fn main() {
    let samples = [2.0_f64, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0];
    let mut stats = CumulativeStandardDeviation::new();
    for &sample in samples.iter() {
        println!("{}", stats.push(sample));
    }
}
| true |
7f22d79324bc0b01611a15a97f1b7d6345cd459d
|
Rust
|
Farooq-azam-khan/rust-practice
|
/borrow2.rs
|
UTF-8
| 230 | 3.546875 | 4 |
[] |
no_license
|
/// Demonstrates mutable borrowing: the callee mutates the string in place
/// and the caller still owns (and prints) the modified value afterwards.
fn main() {
    let mut greeting = String::from("Hello");
    let char_count = string_len(&mut greeting);
    println!("{} has {} chars", greeting, char_count);
}

/// Appends ", world" to `word` and returns its length in bytes
/// (equal to the character count for ASCII content).
fn string_len(word: &mut String) -> usize {
    word.push_str(", world");
    word.len()
}
| true |
31e5c200fdaa6adf66b9dde168b61241044651cd
|
Rust
|
silverweed/ecsde
|
/ecs_game/src/spatial.rs
|
UTF-8
| 13,139 | 2.546875 | 3 |
[] |
no_license
|
use inle_alloc::temp::*;
use inle_app::app::Engine_State;
use inle_ecs::ecs_world::{Ecs_World, Entity, Evt_Entity_Destroyed};
use inle_events::evt_register::{with_cb_data, wrap_cb_data, Event_Callback_Data};
use inle_math::vector::Vec2f;
use inle_physics::collider::C_Collider;
use inle_physics::phys_world::{Collider_Handle, Physics_World};
use inle_physics::spatial::Spatial_Accelerator;
use std::cmp::Ordering;
use std::collections::HashMap;
#[cfg(debug_assertions)]
use {inle_debug::painter::Debug_Painter, std::collections::HashSet};
// @Speed: tune these numbers
const CHUNK_WIDTH: f32 = 200.;
const CHUNK_HEIGHT: f32 = 200.;
// Integer coordinates of a chunk in the world grid (may be negative).
#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Chunk_Coords {
    pub x: i32,
    pub y: i32,
}
// Delegates to the total order below so PartialOrd and Ord always agree.
impl PartialOrd for Chunk_Coords {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Chunk_Coords {
    /// Row-major ("y-major") total order: compare `y` first, then `x`.
    /// This matches the y-major iteration order used when collecting chunks.
    fn cmp(&self, other: &Self) -> Ordering {
        self.y.cmp(&other.y).then_with(|| self.x.cmp(&other.x))
    }
}
impl Chunk_Coords {
    /// Chunk containing the world-space point `pos`. Uses `floor`, so
    /// negative coordinates round toward negative infinity (no chunk
    /// straddles the origin).
    pub fn from_pos(pos: Vec2f) -> Self {
        Self {
            x: (pos.x / CHUNK_WIDTH).floor() as i32,
            y: (pos.y / CHUNK_HEIGHT).floor() as i32,
        }
    }

    /// World-space position of this chunk's minimum (x, y) corner.
    pub fn to_world_pos(self) -> Vec2f {
        Vec2f {
            x: self.x as f32 * CHUNK_WIDTH,
            y: self.y as f32 * CHUNK_HEIGHT,
        }
    }
}
// Spatial hash of the world: maps chunk coordinates to the colliders that
// overlap that chunk. Empty chunks are removed from the map.
pub struct World_Chunks {
    chunks: HashMap<Chunk_Coords, World_Chunk>,
    // Entities destroyed since the last update(), collected via event callback.
    to_destroy: Event_Callback_Data,
}
// One cell of the spatial hash: the colliders currently overlapping it.
#[derive(Default, Debug)]
pub struct World_Chunk {
    pub colliders: Vec<Collider_Handle>,
}
impl World_Chunks {
    /// Creates an empty chunk map with no pending entity destructions.
    pub fn new() -> Self {
        Self {
            chunks: HashMap::new(),
            to_destroy: wrap_cb_data(Vec::<Entity>::new()),
        }
    }

    /// Subscribes to entity-destruction events; destroyed entities are queued
    /// into `to_destroy` and their colliders are removed on the next `update`.
    pub fn init(&mut self, engine_state: &mut Engine_State) {
        engine_state
            .systems
            .evt_register
            .subscribe::<Evt_Entity_Destroyed>(
                Box::new(|entity, to_destroy| {
                    with_cb_data(to_destroy.unwrap(), |to_destroy: &mut Vec<Entity>| {
                        to_destroy.push(entity);
                    });
                }),
                Some(self.to_destroy.clone()),
            );
    }

    /// Drains the queued destroyed entities and removes all of their
    /// colliders from the chunks they occupied.
    pub fn update(&mut self, ecs_world: &Ecs_World, phys_world: &Physics_World) {
        trace!("world_chunks::update");

        // Collect first, remove after: we cannot mutate `self` while the
        // callback data is borrowed.
        let mut to_remove = vec![];
        with_cb_data(&mut self.to_destroy, |to_destroy: &mut Vec<Entity>| {
            for &entity in to_destroy.iter() {
                if let Some(collider) = ecs_world.get_component::<C_Collider>(entity) {
                    for (cld, handle) in
                        phys_world.get_all_colliders_with_handles(collider.phys_body_handle)
                    {
                        to_remove.push((handle, cld.position, cld.shape.extent()));
                    }
                }
            }
            to_destroy.clear();
        });

        for (cld, pos, extent) in to_remove {
            self.remove_collider(cld, pos, extent);
        }
    }

    /// Number of non-empty chunks currently tracked.
    pub fn n_chunks(&self) -> usize {
        self.chunks.len()
    }

    /// Registers a collider in every chunk overlapped by its AABB.
    pub fn add_collider(&mut self, cld_handle: Collider_Handle, pos: Vec2f, extent: Vec2f) {
        let mut chunks = vec![];
        self.get_all_chunks_containing(pos, extent, &mut chunks);
        for coords in chunks {
            self.add_collider_coords(cld_handle, coords);
        }
    }

    /// Adds a collider to a single chunk, creating the chunk on demand.
    fn add_collider_coords(&mut self, cld_handle: Collider_Handle, coords: Chunk_Coords) {
        let chunk = self
            .chunks
            .entry(coords)
            .or_insert_with(World_Chunk::default);
        debug_assert!(
            !chunk.colliders.contains(&cld_handle),
            "Duplicate collider {:?} in chunk {:?}!",
            cld_handle,
            coords
        );
        chunk.colliders.push(cld_handle);
    }

    /// Removes a collider from every chunk overlapped by its AABB.
    pub fn remove_collider(&mut self, cld_handle: Collider_Handle, pos: Vec2f, extent: Vec2f) {
        let mut chunks = vec![];
        self.get_all_chunks_containing(pos, extent, &mut chunks);
        for coords in chunks {
            self.remove_collider_coords(cld_handle, coords);
        }
    }

    /// Removes a collider from a single chunk; drops the chunk if it becomes
    /// empty. Logs an error (but does not panic) if the collider is missing.
    fn remove_collider_coords(&mut self, cld_handle: Collider_Handle, coords: Chunk_Coords) {
        let chunk = self.chunks.get_mut(&coords).unwrap_or_else(|| {
            fatal!(
                "Collider {:?} should be in chunk {:?}, but that chunk does not exist.",
                cld_handle,
                coords
            )
        });
        let idx = chunk.colliders.iter().position(|&c| c == cld_handle);
        if let Some(idx) = idx {
            chunk.colliders.remove(idx);
            if chunk.colliders.is_empty() {
                self.chunks.remove(&coords);
            }
        } else {
            lerr!(
                "Collider {:?} not found in expected chunk {:?}.",
                cld_handle,
                coords
            );
        }
    }

    /// Moves a collider from the chunks covered at `prev_pos` to those
    /// covered at `new_pos`, touching only chunks that actually changed
    /// (set difference computed in O(n) on the two sorted coord lists).
    pub fn update_collider(
        &mut self,
        cld_handle: Collider_Handle,
        prev_pos: Vec2f,
        new_pos: Vec2f,
        extent: Vec2f,
        frame_alloc: &mut Temp_Allocator,
    ) {
        trace!("world_chunks::update_collider");

        let mut prev_coords = excl_temp_array(frame_alloc);
        self.get_all_chunks_containing(prev_pos, extent, &mut prev_coords);
        let prev_coords = unsafe { prev_coords.into_read_only() };

        let mut new_coords = excl_temp_array(frame_alloc);
        self.get_all_chunks_containing(new_pos, extent, &mut new_coords);
        let new_coords = unsafe { new_coords.into_read_only() };

        let mut all_chunks = excl_temp_array(frame_alloc);
        // Pre-allocate enough memory to hold all the chunks; then `chunks_to_add` starts at index 0,
        // while `chunks_to_remove` starts at index `new_coords.len()`.
        // This works because we can have at most `new_coords.len()` chunks to add and `prev_coords.len()`
        // chunks to remove.
        unsafe {
            all_chunks.alloc_additional_uninit(new_coords.len() + prev_coords.len());
        }
        let mut n_chunks_to_add = 0;
        let mut n_chunks_to_remove = 0;
        let chunks_to_add_offset = 0;
        let chunks_to_remove_offset = new_coords.len();

        // Find chunks to add and to remove in O(n).
        // This algorithm assumes that both prev_coords and new_coords are sorted and deduped.
        let mut p_idx = 0;
        let mut n_idx = 0;
        while p_idx < prev_coords.len() && n_idx < new_coords.len() {
            let pc = prev_coords[p_idx];
            let nc = new_coords[n_idx];
            match pc.cmp(&nc) {
                Ordering::Less => {
                    // Chunk only in the previous set: schedule for removal.
                    all_chunks[chunks_to_remove_offset + n_chunks_to_remove] = pc;
                    n_chunks_to_remove += 1;
                    p_idx += 1;
                }
                Ordering::Greater => {
                    // Chunk only in the new set: schedule for addition.
                    all_chunks[chunks_to_add_offset + n_chunks_to_add] = nc;
                    n_chunks_to_add += 1;
                    n_idx += 1;
                }
                Ordering::Equal => {
                    // Present in both sets: nothing to do.
                    p_idx += 1;
                    n_idx += 1;
                }
            }
        }

        // Flush whichever list still has a tail.
        if p_idx < prev_coords.len() {
            let diff = prev_coords.len() - p_idx;
            for i in 0..diff {
                all_chunks[chunks_to_remove_offset + n_chunks_to_remove + i] =
                    prev_coords[p_idx + i];
            }
            n_chunks_to_remove += diff;
        } else if n_idx < new_coords.len() {
            let diff = new_coords.len() - n_idx;
            for i in 0..diff {
                all_chunks[chunks_to_add_offset + n_chunks_to_add + i] = new_coords[n_idx + i];
            }
            n_chunks_to_add += diff;
        }

        // Sanity check: a chunk can never be both added and removed.
        #[cfg(debug_assertions)]
        {
            let to_remove = all_chunks
                .iter()
                .cloned()
                .skip(chunks_to_remove_offset)
                .take(n_chunks_to_remove)
                .collect::<HashSet<_>>();
            let to_add = all_chunks
                .iter()
                .cloned()
                .skip(chunks_to_add_offset)
                .take(n_chunks_to_add)
                .collect::<HashSet<_>>();
            debug_assert_eq!(to_remove.intersection(&to_add).count(), 0);
        }

        for coord in all_chunks
            .iter()
            .skip(chunks_to_add_offset)
            .take(n_chunks_to_add)
        {
            self.add_collider_coords(cld_handle, *coord);
        }

        for coord in all_chunks
            .iter()
            .skip(chunks_to_remove_offset)
            .take(n_chunks_to_remove)
        {
            self.remove_collider_coords(cld_handle, *coord);
        }
    }

    /// Collects the coordinates of every chunk overlapped by the AABB whose
    /// *center* is near `pos` with size `extent` (pos is shifted by
    /// -extent/2 first — see the @Cleanup note). Output is sorted y-major
    /// and deduplicated by construction.
    fn get_all_chunks_containing<T>(&self, pos: Vec2f, extent: Vec2f, coords: &mut T)
    where
        T: Extend<Chunk_Coords>,
    {
        trace!("get_all_chunks_containing");

        #[cfg(debug_assertions)]
        let mut chk_coords = vec![];

        // We need to @Cleanup the -extent*0.5 offset we need to apply and make it consistent throughout the game!
        let pos = pos - extent * 0.5;

        let coords_topleft = Chunk_Coords::from_pos(pos);
        coords.extend(Some(coords_topleft));
        #[cfg(debug_assertions)]
        chk_coords.push(coords_topleft);

        let coords_botright = Chunk_Coords::from_pos(pos + extent);

        // Note: we cycle y-major so the result is automatically sorted (as for Chunk_Coords::cmp)
        for y in 0..=coords_botright.y - coords_topleft.y {
            for x in 0..=coords_botright.x - coords_topleft.x {
                if x == 0 && y == 0 {
                    // Top-left was already pushed above.
                    continue;
                }
                coords.extend(Some(Chunk_Coords::from_pos(
                    pos + v2!(x as f32 * CHUNK_WIDTH, y as f32 * CHUNK_HEIGHT),
                )));
                #[cfg(debug_assertions)]
                chk_coords.push(Chunk_Coords::from_pos(
                    pos + v2!(x as f32 * CHUNK_WIDTH, y as f32 * CHUNK_HEIGHT),
                ));
            }
        }

        #[cfg(debug_assertions)]
        {
            // Result should be sorted and deduped
            // @WaitForStable
            //debug_assert!(coords.iter().is_sorted());
            for i in 1..chk_coords.len() {
                debug_assert!(chk_coords[i] > chk_coords[i - 1]);
            }
            let mut deduped = chk_coords.clone();
            deduped.dedup();
            debug_assert!(chk_coords.len() == deduped.len());
        }
    }
}
impl Spatial_Accelerator<Collider_Handle> for World_Chunks {
    /// Collects the collider handles of every chunk overlapped by the query
    /// AABB. A collider spanning several chunks is yielded once per chunk,
    /// so callers that need uniqueness must dedup the result.
    fn get_neighbours<R>(&self, pos: Vec2f, extent: Vec2f, result: &mut R)
    where
        R: Extend<Collider_Handle>,
    {
        let mut chunks = vec![];
        self.get_all_chunks_containing(pos, extent, &mut chunks);
        for coords in chunks {
            if let Some(chunk) = self.chunks.get(&coords) {
                result.extend(chunk.colliders.iter().copied());
            }
        }
    }
}
#[cfg(debug_assertions)]
impl World_Chunks {
    /// Debug overlay: draws each non-empty chunk as a translucent rectangle
    /// colored by occupancy (green = few colliders, red = the most populated
    /// chunk), labelled with its coordinates and collider count.
    pub fn debug_draw(&self, painter: &mut Debug_Painter) {
        use inle_common::colors;
        use inle_common::paint_props::Paint_Properties;
        use inle_math::transform::Transform2D;

        if self.chunks.is_empty() {
            return;
        }

        // Normalization factor for the green→red occupancy gradient.
        let max_colliders = self
            .chunks
            .iter()
            .map(|(_, chk)| chk.colliders.len())
            .max()
            .unwrap_or(0) as f32;

        for (coords, chunk) in &self.chunks {
            let world_pos = v2!(coords.to_world_pos().x, coords.to_world_pos().y);
            let col = colors::lerp_col(
                colors::rgba(0, 150, 0, 100),
                colors::rgba(150, 0, 0, 100),
                chunk.colliders.len() as f32 / max_colliders,
            );
            painter.add_rect(
                v2!(CHUNK_WIDTH, CHUNK_HEIGHT),
                &Transform2D::from_pos(world_pos),
                Paint_Properties {
                    color: col,
                    border_color: colors::darken(col, 0.7),
                    border_thick: (CHUNK_WIDTH / 50.).max(5.),
                    ..Default::default()
                },
            );
            painter.add_text(
                &format!("{},{}: {}", coords.x, coords.y, chunk.colliders.len()),
                world_pos + v2!(10., 5.),
                (CHUNK_WIDTH as u16 / 10).max(20),
                colors::rgba(50, 220, 0, 250),
            );
        }
    }
}
// FIX: the attribute was `#[cfg(tests)]` — a nonexistent predicate — so this
// module was silently never compiled or run. `cfg(test)` is the predicate
// set by `cargo test` / `rustc --test`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn chunk_coords_ord() {
        // Ordering is y-major: all of row y precedes all of row y+1.
        assert!(Chunk_Coords { x: 0, y: 0 } < Chunk_Coords { x: 1, y: 0 });
        assert!(Chunk_Coords { x: 1, y: 0 } < Chunk_Coords { x: 0, y: 1 });
        assert!(Chunk_Coords { x: 1, y: 1 } < Chunk_Coords { x: 2, y: 1 });
        assert!(Chunk_Coords { x: 2, y: 1 } < Chunk_Coords { x: 1, y: 2 });
    }
}
| true |
a9a5c4c9549a95c9da984863b8cf7e28092d624e
|
Rust
|
baitcenter/sled
|
/crates/sled/src/node.rs
|
UTF-8
| 7,492 | 2.859375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::{fmt, mem::size_of};
use super::*;
// One B+-tree node. `lo`/`hi` are the (inclusive/exclusive) key bounds;
// an empty `hi` means the upper bound is unbounded.
#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub(crate) struct Node {
    pub(crate) data: Data,
    // Right sibling page, if any — presumably for scans; TODO confirm.
    pub(crate) next: Option<PageId>,
    pub(crate) lo: IVec,
    pub(crate) hi: IVec,
    // Child currently being merged into this parent, if a merge is in flight.
    pub(crate) merging_child: Option<PageId>,
    // True once this node has been capped for merging into its left sibling.
    pub(crate) merging: bool,
}
impl fmt::Debug for Node {
    /// Hand-written Debug so keys are rendered via `Data::fmt_keys`
    /// (prefix-decoded against `lo`) instead of as raw bytes.
    fn fmt(
        &self,
        f: &mut fmt::Formatter<'_>,
    ) -> std::result::Result<(), fmt::Error> {
        let data = self.data.fmt_keys(&self.lo);

        write!(
            f,
            "Node {{ \
             lo: {:?} \
             hi: {:?} \
             next: {:?} \
             merging_child: {:?} \
             merging: {} \
             data: {:?} }}",
            self.lo, self.hi, self.next, self.merging_child, self.merging, data
        )
    }
}
impl Node {
    /// Approximate in-memory footprint of this node, saturating on overflow.
    #[inline]
    pub(crate) fn size_in_bytes(&self) -> u64 {
        let self_sz = size_of::<Self>() as u64;
        let lo_sz = self.lo.size_in_bytes();
        let hi_sz = self.hi.size_in_bytes();
        let data_sz = self.data.size_in_bytes();

        self_sz
            .saturating_add(lo_sz)
            .saturating_add(hi_sz)
            .saturating_add(data_sz)
    }

    /// Applies one log `Frag` to this node during consolidation.
    ///
    /// Key-bearing frags (`Set`/`Merge`/`Del`) must fall below `hi`
    /// (an empty `hi` means unbounded); anything else is a logic error and
    /// panics. Merge frags require `merge_operator` to be set.
    pub(crate) fn apply(&mut self, frag: &Frag, merge_operator: Option<usize>) {
        use self::Frag::*;

        assert!(
            !self.merging,
            "somehow a frag was applied to a node after it was merged"
        );

        match *frag {
            Set(ref k, ref v) => {
                // (when hi is empty, it means it's unbounded)
                if self.hi.is_empty()
                    || prefix_cmp_encoded(k, &self.hi, &self.lo)
                        == std::cmp::Ordering::Less
                {
                    self.set_leaf(k.clone(), v.clone());
                } else {
                    panic!(
                        "tried to consolidate set at key <= hi.\
                         Set({:?}, {:?}) to node {:?}",
                        k, v, self
                    )
                }
            }
            Merge(ref k, ref v) => {
                // (when hi is empty, it means it's unbounded)
                if self.hi.is_empty()
                    || prefix_cmp_encoded(k, &self.hi, &self.lo)
                        == std::cmp::Ordering::Less
                {
                    let merge_fn_ptr =
                        merge_operator.expect("must have a merge operator set");
                    // SAFETY relies on the registered pointer being a valid
                    // MergeOperator — upheld by the registration site.
                    unsafe {
                        let merge_fn: MergeOperator =
                            std::mem::transmute(merge_fn_ptr);
                        self.merge_leaf(k.clone(), v.clone(), merge_fn);
                    }
                } else {
                    panic!("tried to consolidate set at key <= hi")
                }
            }
            Del(ref k) => {
                // (when hi is empty, it means it's unbounded)
                if self.hi.is_empty()
                    || prefix_cmp_encoded(k, &self.hi, &self.lo)
                        == std::cmp::Ordering::Less
                {
                    self.del_leaf(k);
                } else {
                    panic!("tried to consolidate del at key <= hi")
                }
            }
            Base(_) => panic!("encountered base page in middle of chain"),
            ParentMergeIntention(pid) => {
                assert!(
                    self.merging_child.is_none(),
                    "trying to merge {:?} into node {:?} which \
                     is already merging another child",
                    frag,
                    self
                );
                self.merging_child = Some(pid);
            }
            ParentMergeConfirm => {
                assert!(self.merging_child.is_some());
                let merged_child = self.merging_child.take().expect(
                    "we should have a specific \
                     child that was merged if this \
                     frag appears here",
                );
                self.data.parent_merge_confirm(merged_child);
            }
            ChildMergeCap => {
                self.merging = true;
            }
        }
    }

    /// Inserts or overwrites `key` in a leaf, keeping records sorted.
    /// Panics if called on an index node.
    pub(crate) fn set_leaf(&mut self, key: IVec, val: IVec) {
        if let Data::Leaf(ref mut records) = self.data {
            let search = records.binary_search_by(|(k, _)| prefix_cmp(k, &key));
            match search {
                Ok(idx) => records[idx] = (key, val),
                Err(idx) => records.insert(idx, (key, val)),
            }
        } else {
            panic!("tried to Set a value to an index");
        }
    }

    /// Runs the user merge operator at `key`: the operator sees the existing
    /// value (if any) and may produce a new value (insert/overwrite) or
    /// None (delete). Panics if called on an index node.
    pub(crate) fn merge_leaf(
        &mut self,
        key: IVec,
        val: IVec,
        merge_fn: MergeOperator,
    ) {
        if let Data::Leaf(ref mut records) = self.data {
            let search = records.binary_search_by(|(k, _)| prefix_cmp(k, &key));

            // The operator receives the full (prefix-decoded) key.
            let decoded_k = prefix_decode(&self.lo, &key);

            match search {
                Ok(idx) => {
                    let new =
                        merge_fn(&*decoded_k, Some(&records[idx].1), &val);
                    if let Some(new) = new {
                        records[idx] = (key, new.into());
                    } else {
                        records.remove(idx);
                    }
                }
                Err(idx) => {
                    let new = merge_fn(&*decoded_k, None, &val);
                    if let Some(new) = new {
                        records.insert(idx, (key, new.into()));
                    }
                }
            }
        } else {
            panic!("tried to Merge a value to an index");
        }
    }

    /// Records a child split in this index node by inserting a pointer to the
    /// new right sibling at separator `at`. Returns false (without changes)
    /// if a racing thread already installed that separator.
    pub(crate) fn parent_split(&mut self, at: &[u8], to: PageId) -> bool {
        if let Data::Index(ref mut ptrs) = self.data {
            let encoded_sep = prefix_encode(&self.lo, at);
            match ptrs.binary_search_by(|a| prefix_cmp(&a.0, &encoded_sep)) {
                Ok(_) => {
                    debug!(
                        "parent_split skipped because \
                         parent already contains child at split point \
                         due to deep race"
                    );
                    return false;
                }
                Err(idx) => ptrs.insert(idx, (encoded_sep, to)),
            }
        } else {
            panic!("tried to attach a ParentSplit to a Leaf chain");
        }

        true
    }

    /// Removes `key` from a leaf if present (absent keys are a no-op).
    /// Panics if called on an index node.
    pub(crate) fn del_leaf(&mut self, key: &IVec) {
        if let Data::Leaf(ref mut records) = self.data {
            let search = records
                .binary_search_by(|&(ref k, ref _v)| prefix_cmp(k, &*key));
            if let Ok(idx) = search {
                records.remove(idx);
            }
        } else {
            panic!("tried to attach a Del to an Index chain");
        }
    }

    /// Splits this node in half, returning the new right-hand node.
    /// `self` is unchanged here; the caller is responsible for truncating it.
    pub(crate) fn split(&self) -> Node {
        let (split, right_data) = self.data.split(&self.lo);
        Node {
            data: right_data,
            next: self.next,
            lo: split,
            hi: self.hi.clone(),
            merging_child: None,
            merging: false,
        }
    }

    /// Returns a copy of this node with its right sibling `rhs` absorbed:
    /// the result spans both key ranges and inherits `rhs`'s `next` link.
    pub(crate) fn receive_merge(&self, rhs: &Node) -> Node {
        let mut merged = self.clone();
        merged.hi = rhs.hi.clone();
        merged.data.receive_merge(
            rhs.lo.as_ref(),
            merged.lo.as_ref(),
            &rhs.data,
        );
        merged.next = rhs.next;
        merged
    }
}
| true |
f90234ba82ae6210d218545be97354487e2a964a
|
Rust
|
lianhuiwang/tokamak
|
/src/parser2/error_handler.rs
|
UTF-8
| 3,425 | 3.046875 | 3 |
[
"Apache-2.0"
] |
permissive
|
extern crate term;
pub use self::Level::*;
use std::cell::Cell;
use std::fmt;
use codemap::{MultiSpan};
// Severity of a diagnostic, ordered roughly from most to least severe.
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
    // Internal compiler error: a bug in the compiler itself.
    Bug,
    // Unrecoverable error: abort immediately.
    Fatal,
    // An error which while not immediately fatal, should stop the compiler
    // progressing beyond the current phase.
    PhaseFatal,
    Error,
    Warning,
    Note,
    Help,
    // A diagnostic that was started but then withdrawn; never displayed.
    Cancelled,
}
impl fmt::Display for Level {
    // Delegates to `to_str`; panics for `Cancelled` (see `to_str`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.to_str().fmt(f)
    }
}
impl Level {
    // Terminal color used when rendering a diagnostic of this level.
    // `Cancelled` diagnostics are never rendered, hence unreachable.
    #[allow(dead_code)]
    fn color(self) -> term::color::Color {
        match self {
            Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED,
            Warning => term::color::YELLOW,
            Note => term::color::BRIGHT_GREEN,
            Help => term::color::BRIGHT_CYAN,
            Cancelled => unreachable!(),
        }
    }

    // Label printed in front of the message, e.g. "error: ...".
    #[allow(dead_code)]
    fn to_str(self) -> &'static str {
        match self {
            Bug => "error: internal compiler error",
            Fatal | PhaseFatal | Error => "error",
            Warning => "warning",
            Note => "note",
            Help => "help",
            Cancelled => panic!("Shouldn't call on cancelled error"),
        }
    }
}
// Builder for an in-flight diagnostic. Currently a stub with no state;
// all methods are unimplemented placeholders.
#[derive(Clone)]
pub struct DiagnosticBuilder;
impl DiagnosticBuilder {
    /// Emit the diagnostic. (Stub: not yet implemented.)
    pub fn emit(&mut self) {
        unimplemented!()
    }

    /// Cancel the diagnostic (a structured diagnostic must either be emitted or
    /// cancelled or it will panic when dropped).
    /// BEWARE: if this DiagnosticBuilder is an error, then creating it will
    /// bump the error count on the Handler and cancelling it won't undo that.
    /// If you want to decrement the error count you should use `Handler::cancel`.
    pub fn cancel(&mut self) {
        unimplemented!()
    }

    /// Attach a note line to the diagnostic. (Stub.)
    #[allow(unused_variables)]
    pub fn note(&mut self, msg: &str) -> &mut DiagnosticBuilder {
        unimplemented!()
    }

    /// Attach a note anchored to a source span. (Stub.)
    #[allow(unused_variables)]
    pub fn span_note<S: Into<MultiSpan>>(&mut self,
                                         sp: S,
                                         msg: &str)
                                         -> &mut DiagnosticBuilder {
        unimplemented!()
    }

    /// Attach a help line to the diagnostic. (Stub.)
    #[allow(unused_variables)]
    pub fn help(&mut self , msg: &str) -> &mut DiagnosticBuilder {
        unimplemented!()
    }

    /// Attach a help line anchored to a source span. (Stub.)
    #[allow(unused_variables)]
    pub fn span_help<S: Into<MultiSpan>>(&mut self,
                                         sp: S,
                                         msg: &str)
                                         -> &mut DiagnosticBuilder {
        unimplemented!()
    }
}
// Top-level sink for diagnostics; tracks the number of reported errors.
pub struct Handler {
    pub err_count: Cell<usize>,
}
impl Handler {
    /// Cancels an in-flight diagnostic. (Stub: not yet implemented.)
    #[allow(unused_variables)]
    pub fn cancel(&mut self, err: &mut DiagnosticBuilder) {
        unimplemented!()
    }

    /// Reports an error at a source span. (Stub.)
    #[allow(unused_variables)]
    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
        unimplemented!()
    }

    /// Reports an internal compiler error at a span and aborts. (Stub.)
    #[allow(unused_variables)]
    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
        unimplemented!()
    }

    /// Like `span_bug` but does not abort. (Stub.)
    #[allow(unused_variables)]
    pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
        unimplemented!()
    }

    /// Starts building an error diagnostic at a span. (Stub.)
    #[allow(unused_variables)]
    pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str)
                                               -> DiagnosticBuilder {
        unimplemented!()
    }

    /// Starts building a fatal diagnostic at a span. Currently just prints
    /// the message before bailing out. (Stub.)
    #[allow(unused_variables)]
    pub fn struct_span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str)
                                                 -> DiagnosticBuilder {
        println!("{}", msg);
        unimplemented!()
    }

    /// Reports an internal compiler error and aborts. (Stub.)
    ///
    /// FIX: added the `#[allow(unused_variables)]` that every other stub in
    /// this impl already carries — without it `msg` produced an
    /// unused-variable warning.
    #[allow(unused_variables)]
    pub fn bug(&self, msg: &str) -> ! {
        unimplemented!()
    }
}
| true |
93f7788948110b906462bcc5836954c931544ec6
|
Rust
|
thomashk0/chip8-rs
|
/packages/chip8/src/screen.rs
|
UTF-8
| 1,604 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
/// 2D integer pixel coordinate (x, y).
type Point2i = (i32, i32);

/// CHIP-8 framebuffer dimensions in pixels.
pub const CHIP8_FB_W: usize = 64;
pub const CHIP8_FB_H: usize = 32;

/// Flat row-major framebuffer; one u32 per pixel.
pub type Chip8Fb = [u32; CHIP8_FB_W * CHIP8_FB_H];

/// CHIP-8 display backed by a fixed 64x32 framebuffer.
///
/// When `inverted_y` is set (the default), y = 0 addresses the *bottom*
/// row of the framebuffer.
#[derive(Clone)]
pub struct Screen {
    inverted_y: bool,
    fb: Chip8Fb,
    width: u32,
    height: u32,
}

impl Screen {
    /// Creates a cleared (all-zero) screen with y-axis inversion enabled.
    pub fn new() -> Self {
        Screen {
            inverted_y: true,
            fb: [0; CHIP8_FB_W * CHIP8_FB_H],
            width: CHIP8_FB_W as u32,
            height: CHIP8_FB_H as u32,
        }
    }

    /// Enables or disables bottom-up y addressing.
    pub fn set_inverted_y(&mut self, b: bool) {
        self.inverted_y = b;
    }

    /// Maps (x, y) to a flat framebuffer index, honouring `inverted_y`.
    fn px_index(&self, coords: Point2i) -> usize {
        let h = self.height - 1;
        let y = if self.inverted_y { h - coords.1 as u32 } else { coords.1 as u32 };
        let k = y * self.width + (coords.0 as u32);
        k as usize
    }

    /// Raw framebuffer contents (row-major, bottom row last in memory).
    pub fn data(&self) -> &[u32] {
        &self.fb
    }

    /// Get (width, height)
    pub fn dims(&self) -> (u32, u32) {
        (self.width, self.height)
    }

    pub fn width(&self) -> u32 {
        self.width
    }

    pub fn height(&self) -> u32 {
        self.height
    }

    /// Overwrites the pixel at `coords` with `value`.
    ///
    /// BUG FIX: this previously indexed the framebuffer directly, ignoring
    /// `inverted_y`, so `set_pixel` and `xor_pixel` addressed *different*
    /// rows whenever inversion was active (which is the default). Both now
    /// go through `px_index` for consistent addressing.
    pub fn set_pixel(&mut self, coords: Point2i, value: u32) {
        let k = self.px_index(coords);
        self.fb[k] = value;
    }

    /// XORs `value` into the pixel at `coords`; returns true when a lit
    /// pixel was erased (both the old pixel and `value` nonzero) — the
    /// CHIP-8 sprite-collision flag.
    pub fn xor_pixel(&mut self, coords: Point2i, value: u32) -> bool {
        let k = self.px_index(coords);
        let old_px = self.fb[k];
        self.fb[k] ^= value;
        old_px != 0 && value != 0
    }

    /// Fills the whole framebuffer with `color`.
    pub fn clear(&mut self, color: u32) {
        self.fb.iter_mut().for_each(|x| *x = color);
    }
}

impl Default for Screen {
    fn default() -> Self {
        Screen::new()
    }
}
| true |
08582e72c9372fcf9c746d54461d48e31f4f26ce
|
Rust
|
Rantanen/intercom
|
/intercom-common/src/idents.rs
|
UTF-8
| 1,586 | 2.53125 | 3 |
[
"MIT"
] |
permissive
|
use crate::prelude::*;
use crate::tyhandlers::ModelTypeSystem;
use syn::{Ident, Path};
pub trait SomeIdent
{
fn get_some_ident(&self) -> Option<Ident>;
}
impl SomeIdent for Path
{
fn get_some_ident(&self) -> Option<Ident>
{
self.get_ident()
.cloned()
.or_else(|| self.segments.last().map(|l| l.ident.clone()))
}
}
pub fn vtable(itf: &Ident, ts: ModelTypeSystem) -> Path
{
let vtable_ident = format_ident!("__{}{}VTable", itf, ts);
parse_quote!(#vtable_ident)
}
pub fn com_to_rust_method_impl(itf: &Ident, method: &Ident, ts: ModelTypeSystem) -> Ident
{
Ident::new(&format!("__{}_{}_{:?}", itf, method, ts), method.span())
}
pub fn with_ts(ident: &Ident, ts: ModelTypeSystem) -> Ident
{
Ident::new(&format!("{}_{:?}", ident, ts), Span::call_site())
}
pub fn clsid_path(struct_path: &Path) -> Path
{
let mut clsid_path = struct_path.clone();
if let Some(mut last) = clsid_path.segments.last_mut() {
last.ident = clsid(&last.ident);
}
clsid_path
}
pub fn clsid(struct_name: &Ident) -> Ident
{
new_ident(&format!("CLSID_{}", struct_name))
}
pub fn iid(itf_name: &Ident, span: Span) -> Ident
{
Ident::new(&format!("IID_{}", itf_name), span)
}
pub fn method_impl<TMethod: std::fmt::Display>(
struct_ident: &Ident,
itf_ident: &Ident,
method_name: TMethod,
ts: ModelTypeSystem,
) -> Ident
{
new_ident(&format!(
"__{}_{}_{}_{:?}",
struct_ident, itf_ident, method_name, ts
))
}
fn new_ident(s: &str) -> Ident
{
Ident::new(s, Span::call_site())
}
| true |
2b1ec48934f11906d2a70c25108b5d203b2d566c
|
Rust
|
media-io/rs_mpegts
|
/src/mpegts/packet.rs
|
UTF-8
| 1,784 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
use mpegts::adaptation_field::AdaptationField;
use mpegts::payload::Payload;
use mpegts::program_association::*;
use mpegts::program_map::*;
// One MPEG-TS transport packet, with its parsed header fields, optional
// adaptation field / payload, and any remaining raw bytes in `data`.
#[derive(Debug, Clone)]
pub struct Packet {
    pub transport_error_indicator: bool,
    pub transport_priority: bool,
    // PID; 0x1FFF is the null packet.
    pub program_id: u16,
    pub transport_scrambling_control: u8,
    // 4-bit per-PID continuity counter.
    pub continuity_counter: u8,
    pub payload_presence: bool,
    pub adaptation_field: Option<AdaptationField>,
    pub payload: Option<Payload>,
    pub data: Vec<u8>,
}
impl fmt::Display for Packet {
    /// Human-readable summary: PID + data size + payload when raw data is
    /// present, otherwise the full Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Idiom fix (`!is_empty()` over `len() > 0`) and removal of
        // commented-out code; output strings are unchanged.
        if !self.data.is_empty() {
            write!(f, "Packet with PID: {:04} (data size = {}), payload {:?}", self.program_id, self.data.len(), self.payload)
        } else {
            write!(f, "Packet: {:?}", self)
        }
    }
}
impl Packet {
    /// All-default transport packet: PID 0, no adaptation field, no payload,
    /// empty data.
    pub fn new() -> Packet {
        Packet {
            transport_error_indicator: false,
            transport_priority: false,
            program_id: 0,
            transport_scrambling_control: 0x00,
            continuity_counter: 0x00,
            payload_presence: false,
            adaptation_field: None,
            payload: None,
            data: vec![],
        }
    }

    /// Packet carrying a Program Association Table (PID stays 0).
    pub fn new_pat(pat: ProgramAssociation) -> Packet {
        let mut packet = Packet::new();
        packet.payload_presence = true;
        packet.payload = Some(Payload {
            pat: Some(pat),
            pmt: None,
            pes: None,
        });
        packet
    }

    /// Packet carrying a Program Map Table on PID `id`.
    pub fn new_pmt(id: u16, pmt: ProgramMap) -> Packet {
        let mut packet = Packet::new();
        packet.program_id = id;
        packet.payload_presence = true;
        packet.payload = Some(Payload {
            pat: None,
            pmt: Some(pmt),
            pes: None,
        });
        packet
    }

    /// Null (stuffing) packet: PID 0x1FFF, no payload.
    pub fn new_null() -> Packet {
        let mut packet = Packet::new();
        packet.program_id = 0x1FFF;
        packet
    }
}
| true |
243e5b5dc8a7c1e8c4264239519675eecbe1de69
|
Rust
|
l4l/whos-online
|
/src/whosb.rs
|
UTF-8
| 1,289 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
use telebot::bot;
use telebot::functions::*;
use reqwest::get;
use serde_json::from_str;
use tokio_core::reactor::Core;
use futures::stream::Stream;
use status::Map;
const NO_USERS: &'static str = "No users found";
const FETCH_ERROR: &'static str = "Nothing found";
/// Fetches `host` over HTTP and deserializes the JSON body into a status
/// `Map`. Any network, body-read or parse failure yields None.
fn fetch(host: &str) -> Option<Map> {
    let body = get(host).and_then(|mut response| response.text()).ok()?;
    from_str(&body).ok()
}
/// Formats the status map as one "<name> is online [...]/offline" line per
/// user, or `NO_USERS` when the map is empty.
///
/// FIX: entries were previously concatenated with `collect::<String>()` and
/// no separator, producing a single unreadable run-on line for multiple
/// users; they are now joined with newlines. Also replaces the
/// match-on-bool with an `if`, and defers the "offline" allocation with
/// `unwrap_or_else`.
fn print_all(map: Map) -> String {
    let lines: Vec<String> = map
        .into_iter()
        .map(|(name, data)| {
            let state = data
                .map(|d| format!("online [{}]", d.description))
                .unwrap_or_else(|| "offline".to_string());
            format!("{} is {}", name, state)
        })
        .collect();
    if lines.is_empty() {
        NO_USERS.to_string()
    } else {
        lines.join("\n")
    }
}
/// Starts the Telegram bot event loop: registers an `/ask` command handler
/// that fetches the status map from `host` and replies with the formatted
/// user list. Blocks the current thread on the tokio reactor indefinitely.
pub fn launch(token: &str, host: &str) {
    let mut lp = Core::new().unwrap();
    // Owned copy so the command closure can be 'static.
    let host = host.to_string();

    // Poll Telegram for updates every 200 ms.
    let bot = bot::RcBot::new(lp.handle(), &token).update_interval(200);

    let handle = bot.new_cmd("/ask").and_then(move |(bot, msg)| {
        let text = fetch(&host).map(print_all).unwrap_or(
            FETCH_ERROR.to_string(),
        );
        // Reply in the chat the command came from.
        bot.message(msg.chat.id, text).send()
    });

    bot.register(handle);
    bot.run(&mut lp).unwrap();
}
| true |
9630129249a991ab73ddf3c7036ef4f26756f1f8
|
Rust
|
yujie21ic/datacannon-rs-core
|
/src/message_structure/kafka_queue.rs
|
UTF-8
| 342 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Structure and functions storing data for when kafka acts as a queue
//!
//! ---
//! author: Andrew Evans
//! ---
/// Configuration for using a Kafka topic as a queue.
///
/// Fields are private; construction happens elsewhere in the crate.
/// (Original doc comment was truncated and described this struct as a
/// function — rewritten as field documentation.)
pub struct KafkaQueue{
    /// Name of the queue.
    queue: String,
    /// Default exchange — presumably the exchange the queue binds to;
    /// original doc was cut off ("Exchange for te"). TODO confirm.
    default_exchange: String,
    /// Optional HA policy flag; semantics not defined here. TODO confirm.
    ha_policy: Option<i8>,
}
| true |
1e325050de63d568e99722f04c3a955399bd8841
|
Rust
|
lilydjwg/chinese-num
|
/src/lib.rs
|
UTF-8
| 4,224 | 3.375 | 3 |
[] |
no_license
|
//! Convert a decimal number to its Chinese form.
//!
//! [](https://travis-ci.org/lilydjwg/chinese-num)
//! [](https://crates.io/crates/chinese-num)
//! [](https://github.com/lilydjwg/chinese-num)
//!
//!
//! # Examples
//!
//! ```
//! let s = chinese_num::to_chinese_num("121").unwrap();
//! assert_eq!(s, "一百二十一");
//! ```
//!
//! ```
//! let s = chinese_num::to_chinese_num("1004000007000500").unwrap();
//! assert_eq!(s, "一千零四万亿零七百万零五百");
//! ```
//!
//! ```
//! let s = chinese_num::to_chinese_num("123000520").unwrap();
//! assert_eq!(s, "一亿二千三百万零五百二十");
//! ```
//!
//! ```
//! let s = chinese_num::to_chinese_num("1234070000123780000087006786520988800000").unwrap();
//! assert_eq!(s, "一千二百三十四万零七百亿零一十二万三千七百八十亿零八千七百亿六千七百八十六万五千二百零九亿八千八百八十万");
//! ```
//!
//! If the given string is not a number, or begins with "0", `None` is returned:
//!
//! ```
//! let s = chinese_num::to_chinese_num("不是数字");
//! assert!(s.is_none());
//! ```
//!
//! ```
//! let s = chinese_num::to_chinese_num("020");
//! assert!(s.is_none());
//! ```
//!
//! The algorithm is taken from here:
//! http://zhuanlan.zhihu.com/iobject/20370983.
const DIGITS: [char; 10] = ['零', '一', '二', '三', '四', '五', '六', '七', '八', '九'];
const TENS_NAME: [char; 4] = ['个', '十', '百', '千'];
const UNIT_RANK: [char; 6] = ['个', '十', '百', '千', '万', '亿'];
fn digit_pos_to_name(pos: usize) -> char {
if pos == 0 {
'个'
} else if pos % 8 == 0 {
'亿'
} else if pos % 4 == 0 {
'万'
} else {
TENS_NAME[pos % 4]
}
}
struct ResultS (String, bool, char);
#[inline]
fn get_unit_rank(u: char) -> usize {
UNIT_RANK.iter().position(|&x| x == u).unwrap()
}
fn append_digit(result: ResultS, tuple: (usize, char)) -> ResultS {
let (digit, this_unit) = tuple;
let ResultS(mut result, pending_zero, last_unit) = result;
let this_str = DIGITS[digit];
if digit == 0 {
if get_unit_rank(last_unit) > get_unit_rank(this_unit) {
ResultS(result, true, last_unit)
} else {
result.push(this_unit);
ResultS(result, false, this_unit)
}
} else {
if pending_zero {
result.push('零');
}
result.push(this_str);
result.push(this_unit);
ResultS(result, false, this_unit)
}
}
pub fn to_chinese_num<N: AsRef<str>>(n: N) -> Option<String> {
let n = n.as_ref();
// special cases
if n == "0" {
return Some("零".to_owned());
}
// non-digit found, nothing, leading zeros
if !n.chars().all(|x| x.is_digit(10)) || n.len() == 0
|| n.chars().nth(0).unwrap() == '0' {
return None;
}
let v = n.as_bytes().iter().rev().enumerate().map(
|(i, c)| ((c - '0' as u8) as usize, digit_pos_to_name(i)))
.rev().fold(ResultS(String::new(), false, '个'), append_digit);
let mut r = v.0;
if r.chars().last().unwrap() == '个' {
r.pop();;
}
if r.starts_with("一十") {
r.remove(0);
}
Some(r)
}
/// A trait adding a `to_chinese_num` method to types, e.g.:
///
/// ```
/// use chinese_num::ToChineseNum;
///
/// assert_eq!(20.to_chinese_num(), Some(String::from("二十")));
/// ```
pub trait ToChineseNum {
fn to_chinese_num(&self) -> Option<String>;
}
impl ToChineseNum for usize {
fn to_chinese_num(&self) -> Option<String> {
to_chinese_num(self.to_string())
}
}
#[test]
fn empty_number() {
let s = to_chinese_num("");
assert!(s.is_none());
}
#[test]
fn num_0() {
let s = to_chinese_num("0").unwrap();
assert_eq!(s, "零");
}
#[test]
fn num_1() {
let s = to_chinese_num("1").unwrap();
assert_eq!(s, "一");
}
#[test]
fn num_10() {
let s = to_chinese_num("10").unwrap();
assert_eq!(s, "十");
}
#[test]
fn num_12() {
let s = to_chinese_num("12").unwrap();
assert_eq!(s, "十二");
}
#[test]
fn num_20() {
let s = to_chinese_num("20").unwrap();
assert_eq!(s, "二十");
}
| true |
eeb9102e847eb34d29fdf87b2edbff4521ebf08e
|
Rust
|
synecdoche/pelikan
|
/src/rust/core/server/src/process/mod.rs
|
UTF-8
| 1,943 | 2.921875 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copyright 2021 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
mod builder;
mod worker_builder;
pub use builder::ProcessBuilder;
pub use worker_builder::WorkerBuilder;
use common::signal::Signal;
use queues::QueuePairs;
use std::thread::JoinHandle;
/// A structure which represents a running Pelikan cache process.
///
/// Note: for long-running daemon, be sure to call `wait()` on this structure to
/// block the process until the threads terminate. For use within tests, be sure
/// to call `shutdown()` to terminate the threads and block until termination.
pub struct Process {
threads: Vec<JoinHandle<()>>,
/// used to send signals to and from the admin thread
signal_queue: QueuePairs<Signal, Signal>,
}
impl Process {
/// Attempts to gracefully shutdown the `Process` by sending a shutdown to
/// each thread and then waiting to join those threads.
///
/// Will terminate ungracefully if it encounters an error in sending a
/// shutdown to any of the threads.
///
/// This function will block until all threads have terminated.
pub fn shutdown(mut self) {
// this sends a shutdown to the admin thread, which will broadcast the
// signal to all sibling threads in the process
if self.signal_queue.broadcast(Signal::Shutdown).is_err() {
fatal!("error sending shutdown signal to thread");
}
// try to wake the admin thread to process the signal
if self.signal_queue.wake_all().is_err() {
error!("error waking threads for shutdown");
}
// wait and join all threads
self.wait()
}
/// Will block until all threads terminate. This should be used to keep the
/// process alive while the child threads run.
pub fn wait(self) {
for thread in self.threads {
let _ = thread.join();
}
}
}
| true |
7e165052c93b69c07fdd2bf4aa9ae7fd1c954796
|
Rust
|
bytebuddha/bevy_tiled_world
|
/examples/utils/colliders.rs
|
UTF-8
| 4,980 | 2.765625 | 3 |
[] |
no_license
|
use bevy::prelude::*;
use bevy_rapier2d::prelude::*;
pub fn draw_colliders(
mut lines: ResMut<bevy_prototype_debug_lines::DebugLines>,
query: Query<(&ColliderShape, &ColliderPosition)>,
) {
for (shape, position) in query.iter() {
match shape.0.as_typed_shape() {
TypedShape::Cuboid(cuboid) => {
let points = cuboid.to_polyline();
lines.line_colored(
Vec3::new(
position.translation.x + points[0][0],
position.translation.y - points[1][1],
10.0,
),
Vec3::new(
position.translation.x + points[1][0],
position.translation.y - points[1][1],
10.0,
),
0.0,
Color::RED,
);
lines.line_colored(
Vec3::new(
position.translation.x + points[1][0],
position.translation.y - points[1][1],
10.0,
),
Vec3::new(
position.translation.x + points[2][0],
position.translation.y - points[2][1],
10.0,
),
0.0,
Color::RED,
);
lines.line_colored(
Vec3::new(
position.translation.x + points[2][0],
position.translation.y - points[2][1],
10.0,
),
Vec3::new(
position.translation.x + points[3][0],
position.translation.y - points[3][1],
10.0,
),
0.0,
Color::RED,
);
lines.line_colored(
Vec3::new(
position.translation.x + points[3][0],
position.translation.y - points[3][1],
10.0,
),
Vec3::new(
position.translation.x + points[0][0],
position.translation.y - points[0][1],
10.0,
),
0.0,
Color::RED,
);
}
TypedShape::Polyline(polyline) => {
let segments: Vec<Segment> = polyline.segments().collect();
for segment in segments.iter() {
lines.line_colored(
Vec3::new(
position.translation.x + segment.a[0],
position.translation.y - segment.a[1],
10.0,
),
Vec3::new(
position.translation.x + segment.b[0],
position.translation.y - segment.b[1],
10.0,
),
0.0,
Color::RED,
);
}
lines.line_colored(
Vec3::new(
position.translation.x + segments[segments.len() - 1].a[0],
position.translation.y - segments[segments.len() - 1].a[1],
10.0,
),
Vec3::new(
position.translation.x + segments[0].a[0],
position.translation.y - segments[0].a[1],
10.0,
),
0.0,
Color::RED,
);
},
TypedShape::Ball(_) => {
lines.line_colored(
Vec3::new(
position.translation.x - 6.0,
position.translation.y - 6.0,
10.0,
),
Vec3::new(
position.translation.x + 6.0,
position.translation.y + 6.0,
10.0,
),
0.0,
Color::RED,
);
lines.line_colored(
Vec3::new(
position.translation.x + 6.0,
position.translation.y - 6.0,
10.0,
),
Vec3::new(
position.translation.x - 6.0,
position.translation.y + 6.0,
10.0,
),
0.0,
Color::RED,
);
}
_ => {}
}
}
}
| true |
25e954ff01d7d287a2e4baab75193f236fb3e22e
|
Rust
|
whitfin/efflux
|
/src/lib.rs
|
UTF-8
| 1,737 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
//! Efflux is a set of Rust interfaces for MapReduce and Hadoop Streaming.
//!
//! This crate provides easy interfaces for working with MapReduce, whether
//! or not you're running on the Hadoop platform. Usage is as simple as a
//! struct which implements either the `Mapper` or `Reducer` trait, as all
//! other interaction is taken care of internally.
//!
//! Macros are provided for IO, to provide a compile-time guarantee of things
//! such as counter/status updates, or writing to the Hadoop task logs.
#![doc(html_root_url = "https://docs.rs/efflux/2.0.1")]
#[macro_use]
pub mod macros;
pub mod context;
pub mod io;
pub mod mapper;
pub mod reducer;
use self::mapper::Mapper;
use self::reducer::Reducer;
use self::mapper::MapperLifecycle;
use self::reducer::ReducerLifecycle;
use self::io::run_lifecycle;
/// Executes a `Mapper` against the current `stdin`.
#[inline]
pub fn run_mapper<M>(mapper: M)
where
M: Mapper + 'static,
{
run_lifecycle(MapperLifecycle::new(mapper));
}
/// Executes a `Reducer` against the current `stdin`.
#[inline]
pub fn run_reducer<R>(reducer: R)
where
R: Reducer + 'static,
{
run_lifecycle(ReducerLifecycle::new(reducer));
}
// prelude module
pub mod prelude {
//! A "prelude" for crates using the `efflux` crate.
//!
//! This prelude contains the required imports for almost all use cases, to
//! avoid having to include modules and structures directly:
//!
//! ```rust
//! use efflux::prelude::*;
//! ```
//!
//! The prelude may grow over time, but it is unlikely to shrink.
pub use super::context::{Configuration, Context, Contextual};
pub use super::log;
pub use super::mapper::Mapper;
pub use super::reducer::Reducer;
}
| true |
429d6d641cbe8615aef22571401e5314aa4970b6
|
Rust
|
Woyten/fileserver
|
/src/main.rs
|
UTF-8
| 1,701 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
use iron::middleware::Handler;
use iron::mime::Mime;
use iron::prelude::*;
use iron::status::Status;
use staticfile::Static;
use std::fs;
use std::fs::ReadDir;
use std::path::Path;
use std::path::PathBuf;
static FILE_FOLDER: &str = "www";
fn main() {
let static_file_handler = Static::new(Path::new(FILE_FOLDER));
Iron::new(move |request: &mut Request| browse(request, &static_file_handler))
.http("0.0.0.0:3000")
.unwrap();
}
/// Request handler: serves a static file when one matches the URL,
/// otherwise falls back to a directory listing of the mapped path.
fn browse(request: &mut Request, static_file_handler: &Static) -> IronResult<Response> {
    // First try to serve the request as a regular file.
    let file_response = static_file_handler.handle(request);
    if file_response.is_ok() {
        return file_response;
    }
    // Map the URL path segments onto the local tree rooted at FILE_FOLDER.
    let mut path = PathBuf::new();
    path.push(FILE_FOLDER);
    for path_element in request.url.path() {
        path.push(path_element);
    }
    match fs::read_dir(&path) {
        Ok(paths) => list_paths(request, paths),
        Err(_) => Ok(Response::with((Status::NotFound, "Invalid path"))),
    }
}
/// Renders a minimal HTML page listing the entries of `paths`, each as a
/// relative link under the requested URL.
fn list_paths(request: &Request, paths: ReadDir) -> IronResult<Response> {
    let mut response = String::new();
    response.push_str(&format!("<div>Content of {}</div>", request.url));
    for path in paths {
        let to_push = match path {
            Ok(file) => format!(
                r#"<a href="{0}">{0}</a>"#,
                file.path().file_name().unwrap().to_str().unwrap()
            ),
            // A failed directory entry is rendered inline instead of aborting.
            Err(err) => format!("{}", err),
        };
        response.push_str(&format!("<div>{}\n</div>", to_push));
    }
    let mime: Mime = "text/html".parse().unwrap();
    // NOTE(review): file names are interpolated into HTML unescaped — names
    // containing markup will break or inject into the page; confirm intent.
    response = format!("<html><meta></meta><body>{}</body></html>", response);
    Ok(Response::with((Status::Ok, response, mime)))
}
| true |
1b11341c511b87dd9b770d99be27b805aaba22e9
|
Rust
|
nikolabr/base64rs
|
/src/lib.rs
|
UTF-8
| 3,224 | 3.375 | 3 |
[] |
no_license
|
pub mod base64 {
    //! Minimal Base64 (RFC 4648, standard alphabet) encoder/decoder.

    /// Standard Base64 alphabet; index 64 is the '=' padding character.
    static BASE64_TABLE: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";

    /// Maps a Base64 character back to its 6-bit value.
    /// Returns 0 for characters outside the alphabet; callers are expected
    /// to validate input with `is_valid_b64` first.
    fn get_ascii(ch: u8) -> u8 {
        BASE64_TABLE
            .chars()
            .position(|x| x == ch as char)
            .map_or(0, |n| n as u8)
    }

    /// Maps a 6-bit value (upper two bits are masked off) to its Base64 character.
    fn get_b64_char(val: u8) -> char {
        BASE64_TABLE.chars().nth((val & 0x3F) as usize).unwrap_or('0')
    }

    /// Returns true iff `data` is non-empty, a multiple of 4 long, and made
    /// up only of Base64 alphabet characters (A-Z, a-z, 0-9, '+', '/', '=').
    pub fn is_valid_b64(data: &String) -> bool {
        if data.is_empty() || data.len() % 4 != 0 {
            return false;
        }
        // BUGFIX: the letter ranges previously extended past 'Z' (0x41..=0x90)
        // and past 'z' (0x61..=0x80), so bytes such as '[', '^' and '{' were
        // wrongly accepted as valid Base64 input.
        data.bytes().all(|b| {
            b.is_ascii_digit()
                || b.is_ascii_uppercase()
                || b.is_ascii_lowercase()
                || b == b'+'
                || b == b'/'
                || b == b'='
        })
    }

    /// Encodes one full 3-byte chunk into 4 Base64 characters.
    fn encode_chunk(chunk: &[u8]) -> [char; 4] {
        [
            get_b64_char(chunk[0] >> 2),
            get_b64_char((chunk[0] << 4) | (chunk[1] >> 4)),
            get_b64_char((chunk[1] << 2) | (chunk[2] >> 6)),
            get_b64_char(chunk[2]),
        ]
    }

    /// Encodes a trailing 1- or 2-byte remainder, appending '=' padding.
    ///
    /// # Panics
    /// Panics if `chunk` is not exactly 1 or 2 bytes long.
    fn add_padding(chunk: &[u8]) -> [char; 4] {
        let mut tmp: [char; 4] = ['0'; 4];
        tmp[0] = get_b64_char(chunk[0] >> 2);
        match chunk.len() {
            1 => {
                tmp[1] = get_b64_char(chunk[0] << 4);
                tmp[2] = '=';
            }
            2 => {
                tmp[1] = get_b64_char((chunk[0] << 4) | (chunk[1] >> 4));
                // BUGFIX: previously read chunk[2] here, which is out of
                // bounds for a 2-byte remainder and panicked at runtime.
                tmp[2] = get_b64_char(chunk[1] << 2);
            }
            _ => panic!("Invalid padding!"),
        };
        tmp[3] = '=';
        tmp
    }

    /// Decodes one 4-character Base64 chunk into 1-3 bytes, honoring '=' padding.
    fn decode_chunk(chunk: &[u8]) -> Vec<u8> {
        let mut tmp: Vec<u8> = Vec::new();
        tmp.push((get_ascii(chunk[0]) << 2) | (get_ascii(chunk[1]) >> 4));
        if chunk[2] != b'=' {
            tmp.push((get_ascii(chunk[1]) << 4) | (get_ascii(chunk[2]) >> 2));
            if chunk[3] != b'=' {
                tmp.push((get_ascii(chunk[2]) << 6) | get_ascii(chunk[3]));
            }
        }
        tmp
    }

    /// Encodes arbitrary bytes as a Base64 string.
    pub fn encode(data: &Vec<u8>) -> String {
        let mut res = String::new();
        let chunks = data.chunks_exact(3);
        // The remainder slice borrows from `data`, so it survives the loop
        // below consuming the iterator.
        let remainder = chunks.remainder();
        for chunk in chunks {
            res.extend(encode_chunk(chunk));
        }
        if !remainder.is_empty() {
            res.extend(add_padding(remainder));
        }
        res
    }

    /// Decodes a Base64 string.
    ///
    /// # Errors
    /// Returns `Err` if the input is not well-formed Base64.
    pub fn decode(data: &String) -> Result<Vec<u8>, String> {
        if !is_valid_b64(data) {
            return Err("Input is not Base64!".to_string());
        }
        let mut res: Vec<u8> = Vec::new();
        for chunk in data.as_bytes().chunks(4) {
            res.extend(decode_chunk(chunk));
        }
        Ok(res)
    }
}
| true |
810107b60d4368049d58d73dd702c5b8dc9f4921
|
Rust
|
arothstein/sandbox-rust
|
/the-book/03/shadowing/src/main.rs
|
UTF-8
| 798 | 4.375 | 4 |
[] |
no_license
|
fn main() {
    let x = 5;
    // Shadow `x` with a brand-new binding instead of mutating it; each
    // `let` below rebinds the name while every individual binding stays
    // immutable.
    let x = x + 1;
    let x = x * 2;
    println!("The value of x is: {}", x);
    // Unlike `mut`, shadowing would still make plain reassignment (without
    // `let`) a compile error, and a shadowing `let` may even change the
    // value's type while reusing the same name.
}
| true |
14e00dc3fb6c55fcec1bb334ebc82ba020b5af9b
|
Rust
|
EFanZh/n-body
|
/src/basic_renderer.rs
|
UTF-8
| 1,785 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
use crate::configuration::Color;
use crate::renderer::Renderer;
use cgmath::Vector2;
use itertools::izip;
use wasm_bindgen::JsValue;
use web_sys::CanvasRenderingContext2d;
pub struct BasicRenderer {
canvas_context: CanvasRenderingContext2d,
body_colors: Vec<String>,
trail_widths: Vec<f64>,
}
impl BasicRenderer {
pub fn new(
canvas_context: CanvasRenderingContext2d,
width: f64,
height: f64,
body_colors: Vec<Color>,
trail_widths: Vec<f64>,
) -> BasicRenderer {
canvas_context.set_global_composite_operation("screen").unwrap();
canvas_context.set_fill_style(&JsValue::from_str("black"));
canvas_context.fill_rect(-width * 0.5, -height * 0.5, width, height);
BasicRenderer {
canvas_context,
body_colors: body_colors.iter().map(|c| c.to_rgba()).collect(),
trail_widths,
}
}
}
impl Renderer for BasicRenderer {
fn render(&mut self, position_histories: &[Vec<Vector2<f64>>]) {
for (position_history, color, trail_width) in izip!(position_histories, &self.body_colors, &self.trail_widths) {
if position_history.len() > 1 {
self.canvas_context.set_stroke_style(&JsValue::from_str(&color));
self.canvas_context.set_line_width(*trail_width);
self.canvas_context.begin_path();
let (first_position, rest_positions) = position_history.split_first().unwrap();
self.canvas_context.move_to(first_position.x, first_position.y);
for position in rest_positions {
self.canvas_context.line_to(position.x, position.y);
}
self.canvas_context.stroke();
}
}
}
}
| true |
e23266534b9a41311fe49104eec99a6650a9eaea
|
Rust
|
Vicfred/codeforces-rust
|
/insomnia_cure_148A.rs
|
UTF-8
| 1,055 | 2.828125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
// https://codeforces.com/problemset/problem/148/A
// implementation, simulation, simple math
use std::io;

/// Reads one line from stdin and parses it as an i64.
/// Panics on I/O failure or malformed input (acceptable for a judge problem).
fn read_i64() -> i64 {
    let mut line = String::new();
    io::stdin()
        .read_line(&mut line)
        .unwrap();
    line.trim().parse().unwrap()
}

fn main() {
    // k, l, m, n: every k-th/l-th/m-th/n-th dragon is hit; d: dragon count.
    let k = read_i64();
    let l = read_i64();
    let m = read_i64();
    let n = read_i64();
    let d = read_i64();

    // Count dragons 1..=d divisible by at least one of k, l, m, n.
    let dragons = (1..=d)
        .filter(|idx| idx % k == 0 || idx % l == 0 || idx % m == 0 || idx % n == 0)
        .count();

    println!("{}", dragons);
}
| true |
b31eb9662d953601504e039d375052a60afc0190
|
Rust
|
geom3trik/fft_resample
|
/examples/demo.rs
|
UTF-8
| 1,768 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use std::{error::Error, fs::File, io::BufReader, usize};
use fft_resample::fft_upsample;
use hound::{WavReader, WavSpec, SampleFormat, WavWriter};
fn main() -> Result<(), hound::Error> {
// Replace with path to test file
let path = "C:/Users/Setup/Music/file_example_WAV_5MG.wav";
let mut reader = WavReader::open(path)?;
let spec = reader.spec();
let mut data = Vec::with_capacity((spec.channels as usize) * (reader.duration() as usize));
match (spec.bits_per_sample, spec.sample_format) {
(16, SampleFormat::Int) => {
for sample in reader.samples::<i16>() {
data.push((sample? as f32) / (0x7fffi32 as f32));
}
}
(24, SampleFormat::Int) => {
for sample in reader.samples::<i32>() {
let val = (sample? as f32) / (0x00ff_ffffi32 as f32);
data.push(val);
}
}
(32, SampleFormat::Int) => {
for sample in reader.samples::<i32>() {
data.push((sample? as f32) / (0x7fff_ffffi32 as f32));
}
}
(32, SampleFormat::Float) => {
for sample in reader.samples::<f32>() {
data.push(sample?);
}
}
_ => return Err(hound::Error::Unsupported),
}
let upsample_length = (data.len() as f32 / 44100.0) * 48000.0;
let resampled_buffer = fft_upsample(&data, upsample_length.round() as usize, spec.channels as usize);
let mut writer = WavWriter::create("test3.wav", spec)?;
for t in 0..resampled_buffer.len() {
let sample = resampled_buffer[t];
let amplitude = i16::MAX as f32;
writer.write_sample((sample * amplitude) as i16)?;
}
writer.finalize()?;
Ok(())
}
| true |
4c05ac454e5ce39285e5543013d9bd27fae4b091
|
Rust
|
SeijiEmery/subliminal
|
/structural_parser/src/text_style.rs
|
UTF-8
| 641 | 3.5 | 4 |
[
"MIT"
] |
permissive
|
pub struct TextStyle {
font: Option<String>,
color: Option<Color>,
size: Option<f32>,
italic: Option<bool>,
bold: Option<bool>,
}
impl Default for TextStyle {
fn default () -> TextStyle {
TextStyle { font: None, color: None, size: None, italic: false, bold: false }
}
}
impl BitOr for TextStyle {
type Output = Self;
fn bitor (self, rhs: Self) -> Self {
TextStyle {
font: font.or(rhs.font),
color: color.or(rhs.font),
size: size.or(rhs.size),
italic: italic.or(rhs.italic),
bold: bold.or(rhs.bold),
}
}
}
| true |
77058d7ec4c60610a551e6b77abc64b8a98b6227
|
Rust
|
mchesser/platformer
|
/src/map.rs
|
UTF-8
| 2,903 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use std::{fs::File, io::Read, path::Path};
use anyhow::Context;
use macroquad::prelude::{Rect, Vec2};
use crate::tiles::{TileInfo, TileSet};
pub struct Map {
pub width: usize,
pub height: usize,
tiles: Vec<u16>,
tileset: TileSet,
}
impl Map {
/// Loads a map from a file
pub fn load_map(path: &Path, tileset: TileSet) -> anyhow::Result<Self> {
static VERSION: u8 = 1;
static MAGIC_ID: [u8; 3] = *b"MAP";
let mut file =
File::open(path).with_context(|| format!("failed to open: {}", path.display()))?;
let mut header = [0; 12];
// Load header into the buffer
match file.read(&mut header) {
Ok(n) if n == 12 => {}
_ => anyhow::bail!("Could not read file header"),
}
// Check the magic id
if &header[0..3] != &MAGIC_ID {
anyhow::bail!("Invalid magic id");
}
// Check the version number
if header[3] != VERSION {
anyhow::bail!("Invalid map version");
}
// Get the width and height of the map
let width = u32::from_le_bytes(header[4..8].try_into().unwrap()) as usize;
let height = u32::from_le_bytes(header[8..12].try_into().unwrap()) as usize;
// Read the tiles
let length = width * height * 2;
let mut tile_buffer = vec![0; length];
match file.read(&mut tile_buffer) {
Ok(n) if n == length => {}
Ok(n) => anyhow::bail!("Invalid number of tiles, expected: {length}, but found: {n}"),
_ => anyhow::bail!("Could not load map tiles"),
}
let tiles =
tile_buffer.chunks(2).map(|x| u16::from_le_bytes(x.try_into().unwrap())).collect();
Ok(Self { tiles, width, height, tileset })
}
pub fn size(&self) -> Vec2 {
let tile_size = self.tile_size() as f32;
Vec2::new(self.width as f32 * tile_size, self.height as f32 * tile_size)
}
pub fn tile_size(&self) -> i32 {
self.tileset.tile_size
}
pub fn tile_info_at(&self, x: usize, y: usize) -> TileInfo {
self.tileset.id(self.get(x, y))
}
fn get(&self, x: usize, y: usize) -> u16 {
assert!(x < self.width);
assert!(y < self.height);
self.tiles[x + y * self.width]
}
pub fn draw(&self, camera: Vec2) {
for tile_x in 0..self.width {
for tile_y in 0..self.height {
let x = (tile_x * self.tile_size() as usize) as f32;
let y = (tile_y * self.tile_size() as usize) as f32;
let dest_rect = Rect::new(
x - camera.x,
y - camera.y,
self.tile_size() as f32,
self.tile_size() as f32,
);
self.tileset.draw(self.get(tile_x, tile_y), dest_rect);
}
}
}
}
| true |
84b82fbb5622e37e3ebdfbf0163a4f74cbfcf4b0
|
Rust
|
HappyCodingRust/async_executors
|
/src/tokio_ct.rs
|
UTF-8
| 5,475 | 2.84375 | 3 |
[
"Unlicense"
] |
permissive
|
use
{
crate :: { SpawnHandle, LocalSpawnHandle, JoinHandle, join_handle::InnerJh } ,
std :: { sync::Arc, future::Future, sync::atomic::AtomicBool } ,
tokio :: { task::LocalSet, runtime::{ Runtime } } ,
futures_task :: { FutureObj, LocalFutureObj, Spawn, LocalSpawn, SpawnError } ,
};
/// An executor that uses a [`tokio::runtime::Runtime`] with the [current thread](tokio::runtime::Builder::new_current_thread)
/// and a [`tokio::task::LocalSet`]. Can spawn `!Send` futures.
///
/// ## Creation of the runtime
///
/// You must use [`TokioCtBuilder`](crate::TokioCtBuilder) to create the executor.
///
/// ```
/// // Make sure to set the `tokio_ct` feature on async_executors.
/// //
/// use
/// {
/// async_executors :: { TokioCt, TokioCtBuilder, LocalSpawnHandleExt } ,
/// tokio :: { runtime::Builder } ,
/// std :: { rc::Rc } ,
/// };
///
/// // You must use the builder. This guarantees that TokioCt is always backed by a single threaded runtime.
/// // You can set other configurations by calling `tokio_builder()` on TokioCtBuilder, so you get
/// // access to the `tokio::runtime::Builder`.
/// //
/// let exec = TokioCtBuilder::new().build().expect( "create tokio runtime" );
///
/// // block_on takes a &self, so if you need to `async move`,
/// // just clone it for use inside the async block.
/// //
/// exec.block_on( async
/// {
/// let not_send = async { let rc = Rc::new(()); };
///
/// // We can spawn !Send futures here.
/// //
/// let join_handle = exec.spawn_handle_local( not_send ).expect( "spawn" );
///
/// join_handle.await;
/// });
///```
///
/// ## Unwind Safety.
///
/// When a future spawned on this wrapper panics, the panic will be caught by tokio in the poll function.
///
/// You must only spawn futures to this API that are unwind safe. Tokio will wrap spawned tasks in
/// [`std::panic::AssertUnwindSafe`] and wrap the poll invocation with [`std::panic::catch_unwind`].
///
/// They reason that this is fine because they require `Send + 'static` on the task. As far
/// as I can tell this is wrong. Unwind safety can be circumvented in several ways even with
/// `Send + 'static` (eg. `parking_lot::Mutex` is `Send + 'static` but `!UnwindSafe`).
///
/// You should make sure that if your future panics, no code that lives on after the panic,
/// nor any destructors called during the unwind can observe data in an inconsistent state.
///
/// Note: the future running from within `block_on` as opposed to `spawn` does not exhibit this behavior and will panic
/// the current thread.
///
/// Note that these are logic errors, not related to the class of problems that cannot happen
/// in safe rust (memory safety, undefined behavior, unsoundness, data races, ...). See the relevant
/// [catch_unwind RFC](https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md)
/// and it's discussion threads for more info as well as the documentation of [std::panic::UnwindSafe]
/// for more information.
///
//
#[ derive( Debug, Clone ) ]
//
#[ cfg_attr( nightly, doc(cfg( feature = "tokio_ct" )) ) ]
//
pub struct TokioCt
{
pub(crate) exec : Arc< Runtime > ,
pub(crate) local : Arc< LocalSet > ,
}
impl TokioCt
{
/// This is the entry point for this executor. Once this call returns, no remaining tasks shall be polled anymore.
/// However the tasks stay in the executor, so if you make a second call to `block_on` with a new task, the older
/// tasks will start making progress again.
///
/// For simplicity, it's advised to just create top level task that you run through `block_on` and make sure your
/// program is done when it returns.
///
/// See: [tokio::runtime::Runtime::block_on]
///
/// ## Panics
///
/// This function will panic if it is called from an async context, including but not limited to making a nested
/// call. It will also panic if the provided future panics.
//
pub fn block_on< F: Future >( &self, f: F ) -> F::Output
{
self.exec.block_on( self.local.run_until( f ) )
}
}
impl Spawn for TokioCt
{
fn spawn_obj( &self, future: FutureObj<'static, ()> ) -> Result<(), SpawnError>
{
// We drop the JoinHandle, so the task becomes detached.
//
let _ = self.local.spawn_local( future );
Ok(())
}
}
impl LocalSpawn for TokioCt
{
fn spawn_local_obj( &self, future: LocalFutureObj<'static, ()> ) -> Result<(), SpawnError>
{
// We drop the JoinHandle, so the task becomes detached.
//
let _ = self.local.spawn_local( future );
Ok(())
}
}
impl<Out: 'static + Send> SpawnHandle<Out> for TokioCt
{
fn spawn_handle_obj( &self, future: FutureObj<'static, Out> ) -> Result<JoinHandle<Out>, SpawnError>
{
Ok( JoinHandle{ inner: InnerJh::Tokio
{
handle : self.exec.spawn( future ) ,
detached: AtomicBool::new( false ) ,
}})
}
}
impl<Out: 'static> LocalSpawnHandle<Out> for TokioCt
{
fn spawn_handle_local_obj( &self, future: LocalFutureObj<'static, Out> ) -> Result<JoinHandle<Out>, SpawnError>
{
Ok( JoinHandle{ inner: InnerJh::Tokio
{
handle : self.local.spawn_local( future ) ,
detached: AtomicBool::new( false ) ,
}})
}
}
#[ cfg(test) ]
//
mod tests
{
use super::*;
// It's important that this is not Send, as we allow spawning !Send futures on it.
//
static_assertions::assert_not_impl_any!( TokioCt: Send, Sync );
}
| true |
7e852c5803efe0d60fab202ff6f01b44360f7835
|
Rust
|
rovangju/vacuum-robot-simulator
|
/src/geometry/vector.rs
|
UTF-8
| 1,738 | 3.796875 | 4 |
[] |
no_license
|
use std::cmp;
use std::fmt;
use std::ops;
use math::{Angle, Scalar};
#[derive(Debug, Clone, Copy)]
pub struct Vector {
pub x: Scalar,
pub y: Scalar,
}
impl fmt::Display for Vector {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {})", self.x, self.y)
}
}
impl ops::Add for Vector {
type Output = Vector;
fn add(self, other: Vector) -> Vector {
Vector::new(self.x + other.x, self.y + other.y)
}
}
impl ops::Sub for Vector {
type Output = Vector;
fn sub(self, other: Vector) -> Vector {
Vector::new(self.x - other.x, self.y - other.y)
}
}
impl ops::Mul<Scalar> for Vector {
type Output = Vector;
fn mul(self, s: Scalar) -> Vector {
Vector::new(self.x * s, self.y * s)
}
}
impl cmp::PartialEq for Vector {
fn eq(&self, other: &Vector) -> bool {
self.x == other.x && self.y == other.y
}
}
impl Vector {
    /// Builds a vector from its components.
    pub fn new(x: Scalar, y: Scalar) -> Vector {
        Vector { x, y }
    }

    /// Unit vector pointing at `angle`, where 0° is the forward
    /// direction (along the positive Y-axis).
    pub fn from_angle(angle: Angle) -> Vector {
        Vector::new(-angle.sin(), angle.cos())
    }

    /// Euclidean length of the vector.
    pub fn length(&self) -> Scalar {
        let squared = self.x * self.x + self.y * self.y;
        squared.sqrt()
    }

    /// Dot product with `q`.
    pub fn dot(&self, q: Vector) -> Scalar {
        self.x * q.x + self.y * q.y
    }

    /// Z-component of the 3D cross product (signed parallelogram area).
    pub fn cross(&self, q: Vector) -> Scalar {
        self.x * q.y - q.x * self.y
    }

    /// Angle of this vector; inverse of [`Vector::from_angle`].
    pub fn angle(&self) -> Scalar {
        -self.x.atan2(self.y)
    }

    /// Returns this vector rotated by `angle`.
    pub fn rotate(&self, angle: Angle) -> Vector {
        let (sin, cos) = (angle.sin(), angle.cos());
        Vector::new(
            cos * self.x - sin * self.y,
            sin * self.x + cos * self.y,
        )
    }
}
| true |
d36dd1e963fe5fee1c70aba9b5486e97a945cf83
|
Rust
|
Vanille-N/rask
|
/src/parse/mod.rs
|
UTF-8
| 2,833 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
mod build;
mod lex;
mod split;
mod util;
pub use build::build;
pub use lex::distribute_lex as lex;
pub use split::split;
use std::rc::Rc;
pub use util::*;
/// Runs the full pipeline (split -> lex -> build) over `src`.
/// A failure in either of the first two stages is reported as a
/// single-element vector holding that error.
pub fn parse(src: &str) -> Vec<Result<Rc<Expr>, ParseErr>> {
    let symbols = match split(src) {
        Ok(symbols) => symbols,
        Err(e) => return vec![Err(e)],
    };
    let tokens = match lex(&symbols) {
        Ok(tokens) => tokens,
        Err(e) => return vec![Err(e)],
    };
    build(&tokens)
}
#[cfg(test)]
#[cfg_attr(tarpaulin, skip)]
mod integrate {
const ASSETS: [&str; 9] = [
"sort",
"set-construct",
"word-count",
"printer",
"interprete",
"unification",
"timer",
"sprintf",
"matrix",
];
use super::*;
use crate::source;
#[test]
fn read_sources() {
for file in ASSETS.iter() {
let prog = source(&("assets/".to_owned() + *file)).unwrap();
let symbols = split(&prog[..]);
if let Err(e) = symbols {
panic!("Could not split {} properly: {:?}", file, e);
}
let symbols = symbols.ok().unwrap();
let tokens = lex(&symbols);
if let Err(e) = tokens {
panic!("Could not tokenize {} properly: {:?}", file, e);
}
let tokens = tokens.ok().unwrap();
let exprs = build(&tokens);
for expr in exprs.iter() {
if let Err(e) = expr {
match e {
ParseErr::MismatchedOpenBrace(n)
| ParseErr::MismatchedOpenParen(n)
| ParseErr::MismatchedCloseBrace(n)
| ParseErr::MismatchedCloseParen(n) => panic!(
"Could not build {} properly: {:?}\nContext: {:?}",
file,
e,
&tokens[n - 5..n + 5]
),
e => panic!("Could not build {} properly: {:?}", file, e),
}
}
}
}
}
#[test]
fn failures() {
assert_eq!(
*parse("(")[0].as_ref().err().unwrap(),
ParseErr::MismatchedOpenParen(0)
);
assert_eq!(
*parse("#")[0].as_ref().err().unwrap(),
ParseErr::LoneNumbersign
);
assert_eq!(source("nofile"), None);
assert_eq!(
*parse("abc |# x")[0].as_ref().err().unwrap(),
ParseErr::NoCommentStart
);
assert_eq!(
*parse("x #| abc")[0].as_ref().err().unwrap(),
ParseErr::UnterminatedComment
);
assert_eq!(
*parse("\"abc")[0].as_ref().err().unwrap(),
ParseErr::UnterminatedString(1)
);
}
}
| true |
2b1e6aba5adabab5c7179b13fd62d2395bc06ca0
|
Rust
|
rust-lang/rust-analyzer
|
/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
|
UTF-8
| 3,765 | 2.859375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use ide_db::{famous_defs::FamousDefs, RootDatabase};
use syntax::ast::{self, AstNode, HasName};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: generate_default_from_enum_variant
//
// Adds a Default impl for an enum using a variant.
//
// ```
// enum Version {
// Undefined,
// Minor$0,
// Major,
// }
// ```
// ->
// ```
// enum Version {
// Undefined,
// Minor,
// Major,
// }
//
// impl Default for Version {
// fn default() -> Self {
// Self::Minor
// }
// }
// ```
pub(crate) fn generate_default_from_enum_variant(
    acc: &mut Assists,
    ctx: &AssistContext<'_>,
) -> Option<()> {
    // The assist is only offered when the cursor is on an enum variant.
    let variant = ctx.find_node_at_offset::<ast::Variant>()?;
    let variant_name = variant.name()?;
    let enum_name = variant.parent_enum().name()?;
    // Tuple/record variants would need constructor arguments, so only
    // unit variants are supported.
    if !matches!(variant.kind(), ast::StructKind::Unit) {
        cov_mark::hit!(test_gen_default_on_non_unit_variant_not_implemented);
        return None;
    }
    // Bail out if the enum already implements `Default`.
    if existing_default_impl(&ctx.sema, &variant).is_some() {
        cov_mark::hit!(test_gen_default_impl_already_exists);
        return None;
    }
    let target = variant.syntax().text_range();
    acc.add(
        AssistId("generate_default_from_enum_variant", AssistKind::Generate),
        "Generate `Default` impl from this enum variant",
        target,
        |edit| {
            // Insert the generated impl right after the enum definition.
            let start_offset = variant.parent_enum().syntax().text_range().end();
            let buf = format!(
                r#"
impl Default for {enum_name} {{
    fn default() -> Self {{
        Self::{variant_name}
    }}
}}"#,
            );
            edit.insert(start_offset, buf);
        },
    )
}
/// Returns `Some(())` when the enum owning `variant` already implements
/// `Default`; `None` otherwise (including when any semantic lookup fails).
fn existing_default_impl(
    sema: &'_ hir::Semantics<'_, RootDatabase>,
    variant: &ast::Variant,
) -> Option<()> {
    let variant_def = sema.to_def(variant)?;
    let parent_enum = variant_def.parent_enum(sema.db);
    let krate = parent_enum.module(sema.db).krate();
    let default_trait = FamousDefs(sema, krate).core_default_Default()?;
    // Map the boolean trait check into this helper's Option contract.
    parent_enum
        .ty(sema.db)
        .impls_trait(sema.db, default_trait, &[])
        .then_some(())
}
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};
    use super::*;
    // Happy path: a unit variant in a multi-variant enum gets a Default impl
    // appended after the enum ($0 marks the cursor position).
    #[test]
    fn test_generate_default_from_variant() {
        check_assist(
            generate_default_from_enum_variant,
            r#"
//- minicore: default
enum Variant {
    Undefined,
    Minor$0,
    Major,
}
"#,
            r#"
enum Variant {
    Undefined,
    Minor,
    Major,
}
impl Default for Variant {
    fn default() -> Self {
        Self::Minor
    }
}
"#,
        );
    }
    // Assist must not fire when a Default impl already exists.
    #[test]
    fn test_generate_default_already_implemented() {
        cov_mark::check!(test_gen_default_impl_already_exists);
        check_assist_not_applicable(
            generate_default_from_enum_variant,
            r#"
//- minicore: default
enum Variant {
    Undefined,
    Minor$0,
    Major,
}
impl Default for Variant {
    fn default() -> Self {
        Self::Minor
    }
}
"#,
        );
    }
    // Assist must not fire on tuple variants (only unit variants qualify).
    #[test]
    fn test_add_from_impl_no_element() {
        cov_mark::check!(test_gen_default_on_non_unit_variant_not_implemented);
        check_assist_not_applicable(
            generate_default_from_enum_variant,
            r#"
//- minicore: default
enum Variant {
    Undefined,
    Minor(u32)$0,
    Major,
}
"#,
        );
    }
    // Single-variant enums work too.
    #[test]
    fn test_generate_default_from_variant_with_one_variant() {
        check_assist(
            generate_default_from_enum_variant,
            r#"
//- minicore: default
enum Variant { Undefi$0ned }
"#,
            r#"
enum Variant { Undefined }
impl Default for Variant {
    fn default() -> Self {
        Self::Undefined
    }
}
"#,
        );
    }
}
| true |
b06e4f1d03551cd08f93419ec93a7285cf13e6e8
|
Rust
|
xgillard/ddo
|
/ddo/examples/max2sat/errors.rs
|
UTF-8
| 1,981 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
// Copyright 2020 Xavier Gillard
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//! This module contains the definition of the errors that can be triggered when
//! parsing an instance of the max2sat problem.
use std::num::ParseIntError;
/// This enumeration simply groups the kind of errors that might occur when parsing a
/// instance file. There can be io errors (file unavailable ?), format error
/// (e.g. the file is not an instance but contains the text of your next paper),
/// or parse int errors (which are actually a variant of the format error since it tells
/// you that the parser expected an integer number but got ... something else).
// `thiserror` derives `Display` (from the `#[error]` attributes) and
// `std::error::Error`; `#[from]` additionally generates `From` impls so both
// source errors convert automatically via `?`.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// There was an io related error
    #[error("io error {0}")]
    Io(#[from] std::io::Error),
    /// The parser expected to read something that was an integer but got some garbage
    #[error("parse int {0}")]
    ParseInt(#[from] ParseIntError),
}
| true |
27cbbbb07c1dc63f818962407e4be6ed5ab15d49
|
Rust
|
CryZe/livesplit-lite-core
|
/src/segment.rs
|
UTF-8
| 309 | 3.09375 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
/// A single segment (split) of a run, identified by its display name.
///
/// `Debug`, `PartialEq` and `Eq` are derived in addition to `Clone` so the
/// type is usable in assertions and diagnostics (purely additive change).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Segment {
    /// Human-readable name of the segment.
    pub name: String,
}

impl fmt::Display for Segment {
    /// Formats the segment as its bare name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.name)
    }
}

impl Segment {
    /// Creates a new segment with the given name.
    pub fn new(name: String) -> Segment {
        // Field-init shorthand replaces the redundant `name: name`.
        Segment { name }
    }
}
| true |
6121cd16bfb95c5752f517480a371dfc984682df
|
Rust
|
alexbispo/rust_the_book
|
/cap03/temperatures_converter/src/main.rs
|
UTF-8
| 1,664 | 3.75 | 4 |
[] |
no_license
|
use std::io;
/// Interactive REPL: converts Fahrenheit <-> Celsius until the user quits.
///
/// Accepted inputs per iteration: `q`, `f <number>`, `c <number>`.
fn main() {
    println!("Temperature converter!");
    println!("===============================================================");
    loop {
        println!("Quit: q <enter>");
        println!("Fahrenheit to Celsius: f <temperature_number> <enter>");
        println!("Celsius to Fahrenheit: c <temperature_number> <enter>");
        let mut user_input = String::new();
        io::stdin()
            .read_line(&mut user_input)
            .expect("Something was bad!");
        println!("");
        // Shadowing drops the trailing newline from read_line.
        let user_input = user_input.trim();
        if user_input == "q" {
            println!("Bye!");
            break;
        }
        // Expect exactly two tokens: unit letter and the number.
        // (The second .trim() is redundant — input was already trimmed above.)
        let inputs: Vec<&str> = user_input.trim().split(' ').collect();
        if inputs.len() != 2 {
            println!("Sorry! Invalid option.\n");
            continue;
        }
        let origin_temperature = inputs[0];
        let temperature: f32 = match inputs[1].trim().parse() {
            Ok(num) => num,
            Err(_) => {
                println!("Sorry! Invalid option.\n");
                continue;
            }
        };
        if origin_temperature == "f" {
            // C = (F - 32) * 5/9
            let converted_temperature = (temperature - 32.0) * (5.0/9.0);
            println!("{} fahrenheit to celsius is {:.2}", temperature, converted_temperature);
        } else if origin_temperature == "c" {
            // F = C * 9/5 + 32
            let converted_temperature = (temperature * (9.0/5.0)) + 32.0;
            println!("{} celsius to fahrenheit is {:.2}", temperature, converted_temperature);
        } else {
            println!("Sorry! Invalid option.\n");
            continue;
        }
        println!("");
    }
}
| true |
70d343ea2276c78f5e6b20ef9090473d65964fd8
|
Rust
|
cbrewster/alcova
|
/alcova/src/live_view.rs
|
UTF-8
| 5,393 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use crate::{
live_socket::{ClientMessage, LiveSocketContext, SocketViewMessage},
LiveSocket, LiveTemplate,
};
use actix::{Actor, ActorContext, Addr, Context, Handler, Message};
use actix_web::{HttpRequest, HttpResponse, Responder};
use jsonwebtoken::{encode, EncodingKey, Header};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
/// Identifier of a live view instance; `serde(transparent)` serializes it as
/// the bare inner integer.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct LiveViewId(pub usize);
/// Actix context type in which a `LiveViewActor<T>` runs.
pub type LiveViewContext<T> = Context<LiveViewActor<T>>;
/// Returns the JWT signing secret from `ALCOVA_SECRET_KEY`, falling back to
/// a hard-coded value (with a warning) when the variable is unset.
pub(crate) fn signing_secret() -> String {
    std::env::var("ALCOVA_SECRET_KEY").unwrap_or_else(|_| {
        warn!("No secret key set! Using unsecure default");
        "secret".into()
    })
}
/// JWT claims wrapper: expiry timestamp plus an arbitrary payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct Claims<T> {
    // Expiry as seconds since the Unix epoch (standard JWT `exp` claim).
    exp: u64,
    pub(crate) data: T,
}
impl<T> Claims<T> {
    /// Builds claims that expire `minutes` from now.
    fn new(minutes: u64, data: T) -> Self {
        // unwrap is safe: system time is always after the Unix epoch.
        let exp = std::time::SystemTime::now()
            .duration_since(std::time::SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_secs()
            + (minutes * 60);
        Self { exp, data }
    }
}
/// The server-side half of a live page: holds state, reacts to client
/// events, and renders a [`LiveTemplate`].
pub trait LiveView: Sized + Unpin + 'static {
    type Template: LiveTemplate + Unpin;
    type SessionData: Serialize + DeserializeOwned;
    /// Unique name identifying this view type on the client.
    fn name() -> &'static str;
    /// Builds the initial view state from the socket context and session.
    fn mount(socket_ctx: &LiveSocketContext, session: Self::SessionData) -> Self;
    /// Hook invoked once the backing actor has started.
    fn started(&mut self, _ctx: &mut LiveViewContext<Self>) {}
    /// Hook invoked for each client event (action name + payload string).
    fn handle_event(&mut self, _event: &str, _value: &str, _ctx: &mut LiveViewContext<Self>) {}
    /// Renders the current state into a template.
    fn template(&self) -> Self::Template;
    /// Renders the full page wrapper, embedding a JWT (signed with
    /// `signing_secret()`, 60-minute expiry) that carries the session data.
    fn to_string(&self, session: &Self::SessionData) -> String {
        let key = signing_secret();
        // TODO: Not sure how we should handle tokens expiring. Maybe reload the page on the
        // client?
        let claims = Claims::new(60, session);
        let token = encode(
            &Header::default(),
            &claims,
            &EncodingKey::from_secret(key.as_bytes()),
        )
        .unwrap();
        self.template()
            .render_with_wrapper(Self::name(), token.as_str())
    }
    /// Pairs the view with its session data for use as an actix responder.
    fn to_response(self, session: Self::SessionData) -> LiveViewResponse<Self> {
        LiveViewResponse {
            live_view: self,
            session,
        }
    }
}
/// Marker trait for actix messages a live view can handle; the message must
/// have no return value.
pub trait LiveMessage: Message<Result = ()> {}
/// A view plus the session data that will be baked into its signed token.
pub struct LiveViewResponse<T: LiveView> {
    live_view: T,
    session: T::SessionData,
}
impl<T> Responder for LiveViewResponse<T>
where
    T: LiveView,
{
    type Error = actix_web::Error;
    type Future = futures::future::Ready<Result<HttpResponse, actix_web::Error>>;
    /// Renders the view (with its session token) into a plain 200 response.
    fn respond_to(self, _req: &HttpRequest) -> Self::Future {
        let body = self.live_view.to_string(&self.session);
        // Create response and set content type
        futures::future::ready(Ok(HttpResponse::Ok().body(body)))
    }
}
/// Messages the socket delivers to a live view actor.
#[derive(Message, Debug, Deserialize)]
#[rtype(result = "()")]
pub enum LiveViewMessage {
    ClientAction(LiveViewAction),
    Stop,
}
/// A client-initiated event: action name plus an optional payload.
#[derive(Debug, Deserialize)]
pub struct LiveViewAction {
    action: String,
    value: Option<String>,
}
/// Actor wrapping a [`LiveView`]; keeps the previously rendered template so
/// subsequent renders can be diffed against it.
pub struct LiveViewActor<T: LiveView> {
    id: LiveViewId,
    pub view: T,
    socket: Addr<LiveSocket>,
    // None until the first render in `started`.
    old_template: Option<T::Template>,
}
impl<T: LiveView + Unpin + 'static> LiveViewActor<T> {
    /// Mounts the view for the given socket context and session.
    pub fn new(
        id: LiveViewId,
        socket: Addr<LiveSocket>,
        context: &LiveSocketContext,
        session: T::SessionData,
    ) -> Self {
        LiveViewActor {
            id,
            view: T::mount(context, session),
            socket,
            old_template: None,
        }
    }
    /// Diffs the current template against the previous render and pushes the
    /// changes to the socket.
    ///
    /// NOTE(review): unwraps `old_template`, so this must only be called
    /// after `started` has produced the first render — verify call sites.
    pub fn send_changes(&mut self) {
        let template = self.view.template();
        let message = ClientMessage::Changes(template.changes(self.old_template.as_ref().unwrap()));
        self.old_template = Some(template);
        self.socket.do_send(SocketViewMessage { message });
    }
}
impl<T> Actor for LiveViewActor<T>
where
    T: LiveView + Unpin + 'static,
{
    type Context = Context<Self>;
    /// First render: sends the full template to the client and remembers it
    /// so later updates can be sent as diffs.
    fn started(&mut self, ctx: &mut Self::Context) {
        self.view.started(ctx);
        let template = self.view.template();
        let message = ClientMessage::Template {
            template: template.render(),
            id: self.id,
        };
        self.old_template = Some(template);
        self.socket.do_send(SocketViewMessage { message });
    }
}
impl<T> Handler<LiveViewMessage> for LiveViewActor<T>
where
    T: LiveView + Unpin + 'static,
{
    type Result = ();
    fn handle(&mut self, msg: LiveViewMessage, ctx: &mut Self::Context) -> Self::Result {
        match msg {
            LiveViewMessage::ClientAction(LiveViewAction { action, value }) => {
                // Missing payloads are normalized to the empty string.
                let value = value.unwrap_or(String::new());
                self.view.handle_event(&action, &value, ctx);
                self.send_changes();
            }
            LiveViewMessage::Stop => ctx.stop(),
        }
    }
}
/// Implemented by views that want to receive custom actix messages.
pub trait LiveHandler<M: LiveMessage>
where
    Self: LiveView,
{
    fn handle(&mut self, msg: M, ctx: &mut LiveViewContext<Self>);
}
impl<T, M> Handler<M> for LiveViewActor<T>
where
    T: LiveView + Unpin + LiveHandler<M> + 'static,
    M: LiveMessage,
{
    type Result = ();
    /// Forwards the message to the view, then pushes the resulting diff.
    fn handle(&mut self, msg: M, ctx: &mut Self::Context) -> Self::Result {
        self.view.handle(msg, ctx);
        self.send_changes();
    }
}
| true |
ebedcc11fadf54c9cb0f39de30e39b06574346c1
|
Rust
|
ypoluektovich/tmux-interface-rs
|
/src/commands/windows_and_panes/select_pane.rs
|
UTF-8
| 4,684 | 2.953125 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::commands::constants::*;
use crate::{Error, TmuxCommand, TmuxOutput};
use std::borrow::Cow;
/// Make pane `target-pane` the active pane in window `target-window`
///
/// # Manual
///
/// tmux ^3.1:
/// ```text
/// tmux select-pane [-DdeLlMmRUZ] [-T title] [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^2.6:
/// ```text
/// tmux select-pane [-DdeLlMmRU] [-T title] [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^2.1:
/// ```text
/// tmux select-pane [-DdegLlMmRU] [-P style] [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^2.0:
/// ```text
/// tmux select-pane [-DdeLlRU] [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^1.5:
/// ```text
/// tmux select-pane [-DLlRU] [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^1.3:
/// ```text
/// tmux select-pane [-DLRU] [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^1.0:
/// ```text
/// tmux select-pane [-t target-pane]
/// (alias: selectp)
/// ```
///
/// tmux ^0.8:
/// ```text
/// tmux select-pane [-p pane-index] [-t target-window]
/// (alias: selectp)
/// ```
/// Builder over [`TmuxCommand`] assembling a `select-pane` invocation.
#[derive(Debug, Clone)]
pub struct SelectPane<'a>(pub TmuxCommand<'a>);
impl<'a> Default for SelectPane<'a> {
    /// Starts from an empty `TmuxCommand` with the command name preset.
    fn default() -> Self {
        Self(TmuxCommand {
            cmd: Some(Cow::Borrowed(SELECT_PANE)),
            ..Default::default()
        })
    }
}
impl<'a> SelectPane<'a> {
    /// Builder entry point; equivalent to [`Default::default`].
    pub fn new() -> Self {
        Default::default()
    }
    /// `[-D]` - pane below
    #[cfg(feature = "tmux_1_3")]
    pub fn down(&mut self) -> &mut Self {
        self.0.push_flag(D_UPPERCASE_KEY);
        self
    }
    /// `[-d]` - disable input
    #[cfg(feature = "tmux_2_0")]
    pub fn disable(&mut self) -> &mut Self {
        self.0.push_flag(D_LOWERCASE_KEY);
        self
    }
    /// `[-e]` - enable input
    #[cfg(feature = "tmux_2_0")]
    pub fn enable(&mut self) -> &mut Self {
        self.0.push_flag(E_LOWERCASE_KEY);
        self
    }
    /// `[-g]` - show the current pane style
    #[cfg(feature = "tmux_2_1")]
    pub fn show_style(&mut self) -> &mut Self {
        self.0.push_flag(G_LOWERCASE_KEY);
        self
    }
    /// `[-L]` - pane left
    #[cfg(feature = "tmux_1_3")]
    pub fn left(&mut self) -> &mut Self {
        self.0.push_flag(L_UPPERCASE_KEY);
        self
    }
    /// `[-l]` - equivalent to last-pane command
    #[cfg(feature = "tmux_1_5")]
    pub fn last(&mut self) -> &mut Self {
        self.0.push_flag(L_LOWERCASE_KEY);
        self
    }
    /// `[-M]` - clear marked pane
    ///
    /// NOTE(review): per tmux(1), `-M` *clears* the marked pane, so this
    /// method's name appears swapped with `clear_marked` below; renaming
    /// would break callers, so flagging only.
    #[cfg(feature = "tmux_2_1")]
    pub fn set_marked(&mut self) -> &mut Self {
        self.0.push_flag(M_UPPERCASE_KEY);
        self
    }
    /// `[-m]` - set marked pane
    ///
    /// NOTE(review): per tmux(1), `-m` *sets* the marked pane — see
    /// `set_marked` above.
    #[cfg(feature = "tmux_2_1")]
    pub fn clear_marked(&mut self) -> &mut Self {
        self.0.push_flag(M_LOWERCASE_KEY);
        self
    }
    /// `[-R]` - pane right
    #[cfg(feature = "tmux_1_3")]
    pub fn right(&mut self) -> &mut Self {
        self.0.push_flag(R_UPPERCASE_KEY);
        self
    }
    /// `[-U]` - pane above
    #[cfg(feature = "tmux_1_3")]
    pub fn up(&mut self) -> &mut Self {
        self.0.push_flag(U_UPPERCASE_KEY);
        self
    }
    /// `[-Z]` - keep the window zoomed if it was zoomed
    #[cfg(feature = "tmux_3_1")]
    pub fn keep_zoomed(&mut self) -> &mut Self {
        self.0.push_flag(Z_UPPERCASE_KEY);
        self
    }
    /// `[-P style]` - set the style for a single pane
    #[cfg(feature = "tmux_2_1")]
    pub fn style<S: Into<Cow<'a, str>>>(&mut self, style: S) -> &mut Self {
        self.0.push_option(P_UPPERCASE_KEY, style);
        self
    }
    /// `[-T title]` - title
    #[cfg(feature = "tmux_2_6")]
    pub fn title<S: Into<Cow<'a, str>>>(&mut self, title: S) -> &mut Self {
        self.0.push_option(T_UPPERCASE_KEY, title);
        self
    }
    /// `[-t target-pane]` - target-pane
    #[cfg(feature = "tmux_1_0")]
    pub fn target_pane<S: Into<Cow<'a, str>>>(&mut self, target_pane: S) -> &mut Self {
        self.0.push_option(T_LOWERCASE_KEY, target_pane);
        self
    }
    /// Runs the assembled `select-pane` command and returns its output.
    pub fn output(&self) -> Result<TmuxOutput, Error> {
        self.0.output()
    }
}
impl<'a> From<TmuxCommand<'a>> for SelectPane<'a> {
    /// Reuses the `bin` of an existing command and presets `select-pane`.
    ///
    /// NOTE(review): all fields other than `bin` are discarded by
    /// `..Default::default()` — confirm this is intentional.
    fn from(item: TmuxCommand<'a>) -> Self {
        Self(TmuxCommand {
            bin: item.bin,
            cmd: Some(Cow::Borrowed(SELECT_PANE)),
            ..Default::default()
        })
    }
}
impl<'a> From<&TmuxCommand<'a>> for SelectPane<'a> {
    /// Borrowing variant of the conversion above; clones only `bin`.
    fn from(item: &TmuxCommand<'a>) -> Self {
        Self(TmuxCommand {
            bin: item.bin.clone(),
            cmd: Some(Cow::Borrowed(SELECT_PANE)),
            ..Default::default()
        })
    }
}
1b436e87e29d2332e959e3d751d15137af83a859
|
Rust
|
Ryan1729/rote
|
/libs/move_mod/src/move_mod.rs
|
UTF-8
| 1,600 | 3.09375 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use macros::{fmt_display, ord};
/// A cursor movement command: single-step moves, line-relative jumps,
/// buffer-relative jumps, and likely-edit-location jumps.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Move {
    Up,
    Down,
    Left,
    Right,
    ToLineStart,
    ToLineEnd,
    ToBufferStart,
    ToBufferEnd,
    ToPreviousLikelyEditLocation,
    ToNextLikelyEditLocation,
}
use Move::*;
// Compact display form for each movement (`^`/`v`/`<`/`>` for single steps,
// `X<`/`X>` for jumps). `fmt_display!` comes from the project's `macros`
// crate and generates the `Display` impl.
fmt_display!(for Move: r#move in "{}", match r#move {
    Up => "^",
    Down => "v",
    Left => "<",
    Right => ">",
    ToLineStart => "Line<",
    ToLineEnd => "Line>",
    ToBufferStart => "Buffer<",
    ToBufferEnd => "Buffer>",
    ToPreviousLikelyEditLocation => "Edit<",
    ToNextLikelyEditLocation => "Edit>",
});
// Maps each variant to a stable ordinal, used only to derive a total order
// over `Move` (declaration order).
macro_rules! to_num {
    ($m: expr) => {
        match $m {
            Up => 0,
            Down => 1,
            Left => 2,
            Right => 3,
            ToLineStart => 4,
            ToLineEnd => 5,
            ToBufferStart => 6,
            ToBufferEnd => 7,
            ToPreviousLikelyEditLocation => 8,
            ToNextLikelyEditLocation => 9,
        }
    };
}
// `ord!` (from the `macros` crate) generates Ord/PartialOrd by comparing the
// ordinals produced above.
ord!(for Move: r#move, other in to_num!(r#move).cmp(&to_num!(other)));
impl std::ops::Not for Move {
type Output = Move;
fn not(self) -> Self::Output {
match self {
Up => Down,
Down => Up,
Left => Right,
Right => Left,
ToLineStart => ToLineEnd,
ToLineEnd => ToLineStart,
ToBufferStart => ToBufferEnd,
ToBufferEnd => ToBufferStart,
ToPreviousLikelyEditLocation => ToNextLikelyEditLocation,
ToNextLikelyEditLocation => ToPreviousLikelyEditLocation,
}
}
}
| true |
c8d35e534af7e67f65945615931fceb055007d00
|
Rust
|
mishaszu/RUST-INTRO
|
/src/ex1/match_statement/src/basic_matching.rs
|
UTF-8
| 328 | 3.15625 | 3 |
[] |
no_license
|
/// Maps a numeric country dialing code to a display name.
///
/// Unrecognized codes in `1..=999` map to "unknown"; anything outside that
/// range (0, negatives, >= 1000) is "invalid".
fn country_for(code: i32) -> &'static str {
    match code {
        44 => "UK",
        46 => "Sweden",
        // `1..=999` replaces the deprecated `1...999` inclusive-range syntax.
        1..=999 => "unknown",
        _ => "invalid",
    }
}

/// Demo driver: classifies a sample code and prints the result.
fn match_test() {
    let country_code = 1000;
    let country = country_for(country_code);
    println!("the country for this code is: {}", country);
}
/// Entry point for this example module: prints a banner and runs the
/// pattern-matching demo.
pub fn run() {
    // Fixed typo in the banner ("maching" -> "matching").
    println!("Running basic matching");
    match_test();
}
| true |
40d6a584f86d61530d0458014433319562701614
|
Rust
|
rust-lang/crates.io
|
/src/email.rs
|
UTF-8
| 9,186 | 2.796875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::path::PathBuf;
use std::sync::Mutex;
use crate::util::errors::{server_error, AppResult};
use crate::config;
use crate::Env;
use lettre::message::header::ContentType;
use lettre::transport::file::FileTransport;
use lettre::transport::smtp::authentication::{Credentials, Mechanism};
use lettre::transport::smtp::SmtpTransport;
use lettre::{Message, Transport};
use rand::distributions::{Alphanumeric, DistString};
/// Sends transactional mail for crates.io through one of several backends
/// (SMTP in production, filesystem or in-memory elsewhere).
#[derive(Debug)]
pub struct Emails {
    backend: EmailBackend,
}
impl Emails {
    /// Create a new instance detecting the backend from the environment. This will either connect
    /// to a SMTP server or store the emails on the local filesystem.
    pub fn from_environment(config: &config::Server) -> Self {
        let backend = match (
            dotenvy::var("MAILGUN_SMTP_LOGIN"),
            dotenvy::var("MAILGUN_SMTP_PASSWORD"),
            dotenvy::var("MAILGUN_SMTP_SERVER"),
        ) {
            (Ok(login), Ok(password), Ok(server)) => EmailBackend::Smtp {
                server,
                login,
                password,
            },
            // Any missing SMTP variable falls back to writing .eml files.
            _ => EmailBackend::FileSystem {
                path: "/tmp".into(),
            },
        };
        // Refuse to boot production with a debugging backend.
        if config.base.env == Env::Production && !matches!(backend, EmailBackend::Smtp { .. }) {
            panic!("only the smtp backend is allowed in production");
        }
        Self { backend }
    }
    /// Create a new test backend that stores all the outgoing emails in memory, allowing for tests
    /// to later assert the mails were sent.
    pub fn new_in_memory() -> Self {
        Self {
            backend: EmailBackend::Memory {
                mails: Mutex::new(Vec::new()),
            },
        }
    }
    /// Attempts to send a confirmation email.
    pub fn send_user_confirm(&self, email: &str, user_name: &str, token: &str) -> AppResult<()> {
        // Create a URL with token string as path to send to user
        // If user clicks on path, look email/user up in database,
        // make sure tokens match
        let subject = "Please confirm your email address";
        let body = format!(
            "Hello {}! Welcome to Crates.io. Please click the
link below to verify your email address. Thank you!\n
https://{}/confirm/{}",
            user_name,
            crate::config::domain_name(),
            token
        );
        self.send(email, subject, &body)
    }
    /// Attempts to send an ownership invitation.
    pub fn send_owner_invite(
        &self,
        email: &str,
        user_name: &str,
        crate_name: &str,
        token: &str,
    ) -> AppResult<()> {
        let subject = "Crate ownership invitation";
        let body = format!(
            "{user_name} has invited you to become an owner of the crate {crate_name}!\n
Visit https://{domain}/accept-invite/{token} to accept this invitation,
or go to https://{domain}/me/pending-invites to manage all of your crate ownership invitations.",
            domain = crate::config::domain_name()
        );
        self.send(email, subject, &body)
    }
    /// Attempts to send an API token exposure notification email
    pub fn send_token_exposed_notification(
        &self,
        email: &str,
        url: &str,
        reporter: &str,
        source: &str,
        token_name: &str,
    ) -> AppResult<()> {
        let subject = "Exposed API token found";
        let mut body = format!(
            "{reporter} has notified us that your crates.io API token {token_name}\n
has been exposed publicly. We have revoked this token as a precaution.\n
Please review your account at https://{domain} to confirm that no\n
unexpected changes have been made to your settings or crates.\n
\n
Source type: {source}\n",
            domain = crate::config::domain_name()
        );
        // The reporter may not always know where the token was found.
        if url.is_empty() {
            body.push_str("\nWe were not informed of the URL where the token was found.\n");
        } else {
            body.push_str(&format!("\nURL where the token was found: {url}\n"));
        }
        self.send(email, subject, &body)
    }
    /// This is supposed to be used only during tests, to retrieve the messages stored in the
    /// "memory" backend. It's not cfg'd away because our integration tests need to access this.
    pub fn mails_in_memory(&self) -> Option<Vec<StoredEmail>> {
        if let EmailBackend::Memory { mails } = &self.backend {
            Some(mails.lock().unwrap().clone())
        } else {
            None
        }
    }
    /// Builds the message (with a locally generated message ID) and delivers
    /// it through whichever backend is configured.
    fn send(&self, recipient: &str, subject: &str, body: &str) -> AppResult<()> {
        // The message ID is normally generated by the SMTP server, but if we let it generate the
        // ID there will be no way for the crates.io application to know the ID of the message it
        // just sent, as it's not included in the SMTP response.
        //
        // Our support staff needs to know the message ID to be able to find misdelivered emails.
        // Because of that we're generating a random message ID, hoping the SMTP server doesn't
        // replace it when it relays the message.
        let message_id = format!(
            "<{}@{}>",
            Alphanumeric.sample_string(&mut rand::thread_rng(), 32),
            crate::config::domain_name(),
        );
        let email = Message::builder()
            .message_id(Some(message_id.clone()))
            .to(recipient.parse()?)
            .from(self.sender_address().parse()?)
            .subject(subject)
            .header(ContentType::TEXT_PLAIN)
            .body(body.to_string())?;
        match &self.backend {
            EmailBackend::Smtp {
                server,
                login,
                password,
            } => {
                SmtpTransport::relay(server)
                    .and_then(|transport| {
                        transport
                            .credentials(Credentials::new(login.clone(), password.clone()))
                            .authentication(vec![Mechanism::Plain])
                            .build()
                            .send(&email)
                    })
                    .map_err(|error| {
                        error!(?error, "Failed to send email");
                        server_error("Failed to send the email")
                    })?;
                info!(?message_id, ?subject, "Email sent");
            }
            EmailBackend::FileSystem { path } => {
                // Writes a `<id>.eml` file into `path` instead of sending.
                let id = FileTransport::new(path).send(&email).map_err(|error| {
                    error!(?error, "Failed to send email");
                    server_error("Email file could not be generated")
                })?;
                info!(
                    path = ?path.join(format!("{id}.eml")),
                    ?subject,
                    "Email sent"
                );
            }
            EmailBackend::Memory { mails } => {
                mails.lock().unwrap().push(StoredEmail {
                    to: recipient.into(),
                    subject: subject.into(),
                    body: body.into(),
                });
            }
        }
        Ok(())
    }
    /// The From address: the SMTP login in production, a placeholder in the
    /// debugging backends.
    fn sender_address(&self) -> &str {
        match &self.backend {
            EmailBackend::Smtp { login, .. } => login,
            EmailBackend::FileSystem { .. } => "test@localhost",
            EmailBackend::Memory { .. } => "test@localhost",
        }
    }
}
/// Selects how outgoing mail is delivered.
enum EmailBackend {
    /// Backend used in production to send mails using SMTP.
    Smtp {
        server: String,
        login: String,
        password: String,
    },
    /// Backend used locally during development, will store the emails in the provided directory.
    FileSystem { path: PathBuf },
    /// Backend used during tests, will keep messages in memory to allow tests to retrieve them.
    Memory { mails: Mutex<Vec<StoredEmail>> },
}
// Hand-written Debug so the SMTP password never appears in logs or panics.
impl std::fmt::Debug for EmailBackend {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // The password field is *intentionally* not included
            EmailBackend::Smtp { server, login, .. } => f
                .debug_struct("Smtp")
                .field("server", server)
                .field("login", login)
                .finish(),
            EmailBackend::FileSystem { path } => {
                f.debug_struct("FileSystem").field("path", path).finish()
            }
            EmailBackend::Memory { .. } => f.write_str("Memory"),
        }
    }
}
/// An email captured by the in-memory test backend.
#[derive(Debug, Clone)]
pub struct StoredEmail {
    pub to: String,
    pub subject: String,
    pub body: String,
}
#[cfg(test)]
mod tests {
    use super::*;
    // The recipient must parse as a mailbox address; garbage is rejected by
    // the message builder before any backend is involved.
    #[test]
    fn sending_to_invalid_email_fails() {
        let emails = Emails::new_in_memory();
        assert_err!(emails.send(
            "String.Format(\"{0}.{1}@live.com\", FirstName, LastName)",
            "test",
            "test",
        ));
    }
    #[test]
    fn sending_to_valid_email_succeeds() {
        let emails = Emails::new_in_memory();
        assert_ok!(emails.send("someone@example.com", "test", "test"));
    }
}
| true |
0264df150e0ef52391e318759d05ad4079086336
|
Rust
|
michaelherger/librespot
|
/core/src/util.rs
|
UTF-8
| 589 | 3.09375 | 3 |
[
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] |
permissive
|
use std::mem;
/// Types with a well-defined successor value.
pub trait Seq {
    fn next(&self) -> Self;
}

/// Implements [`Seq`] for unsigned integer types; the successor wraps on
/// overflow instead of panicking.
macro_rules! impl_seq {
    ($($int:ty)*) => {
        $(
            impl Seq for $int {
                fn next(&self) -> Self {
                    self.wrapping_add(1)
                }
            }
        )*
    };
}

impl_seq!(u8 u16 u32 u64 usize);

/// Hands out consecutive sequence numbers, wrapping around at the type's
/// maximum value.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct SeqGenerator<T: Seq>(T);

impl<T: Seq> SeqGenerator<T> {
    /// Creates a generator whose first returned value is `value`.
    pub fn new(value: T) -> Self {
        SeqGenerator(value)
    }

    /// Returns the current value and advances the generator by one step.
    pub fn get(&mut self) -> T {
        let successor = self.0.next();
        mem::replace(&mut self.0, successor)
    }
}
| true |
3e59f3b73d6bfb037621cc2624509352efe28e92
|
Rust
|
aheart/hearth
|
/src/metrics/mod.rs
|
UTF-8
| 1,523 | 2.671875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
pub mod aggregator;
mod cpu;
mod disk;
pub mod hub;
mod la;
mod metric_buffer;
mod network;
mod ram;
mod space;
use std::time::SystemTime;
/// The structured metrics produced by one plugin for a single sample.
#[derive(PartialEq, Debug)]
pub enum Metrics {
    Cpu(cpu::CpuMetrics),
    Disk(disk::DiskMetrics),
    La(la::LaMetrics),
    Net(network::NetMetrics),
    Ram(ram::RamMetrics),
    Space(space::SpaceMetrics),
}
/// Interface for Metric Plugins that possess the knowledge of retrieving raw metric data and
/// processing this raw data into structured [`Metrics`] values.
pub trait MetricPlugin: Send + 'static {
    /// Returns a command that should be run in order to retrieve raw data
    fn get_query(&self) -> &str;
    /// Transforms raw command output into a structured `Metrics` value for
    /// the given sample timestamp.
    fn process_data(&mut self, raw_data: &str, timestamp: &SystemTime) -> Metrics;
    /// Returns a `Metrics` value with default/empty readings.
    fn empty_metrics(&self) -> Metrics;
}
/// Creates one instance of every available metric plugin, configured for the
/// given disk, filesystem, and network interface, and returns them as a
/// `Vec` (the previous doc comment incorrectly said "HashMap").
fn metric_plugin_factory(
    disk: &str,
    filesystem: &str,
    network_interface: &str,
) -> Vec<Box<dyn MetricPlugin>> {
    // Return the vector directly instead of binding it to a temporary
    // (clippy::let_and_return).
    vec![
        Box::new(cpu::CpuMetricPlugin::new()),
        Box::new(ram::RamMetricPlugin::new()),
        Box::new(la::LoadAverageMetricPlugin::new()),
        Box::new(disk::DiskMetricPlugin::new(disk)),
        Box::new(network::NetworkMetricPlugin::new(network_interface)),
        Box::new(space::SpaceMetricPlugin::new(filesystem)),
    ]
}
| true |
81754a6d5d7adb8cc231319c4b1e3c1837370eda
|
Rust
|
lorenzoditucci/calyx
|
/calyx/src/passes/group_to_invoke.rs
|
UTF-8
| 4,197 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use std::rc::Rc;
use itertools::Itertools;
use crate::analysis::ReadWriteSet;
use crate::ir::RRC;
use crate::ir::{
self,
traversal::{Action, Named, VisResult, Visitor},
};
/// Transform groups that are structurally invoking components into equivalent
/// [ir::Invoke] statements.
///
/// For a group to meet the requirements of this pass, it must
/// 1. Only use unguarded assignments
/// 2. Only assign to input ports of one component
/// 3. Assign `1'd1` to the @go port of the component, and
/// 4. Depend directly on the @done port of the component for its done
/// condition.
/// Pass marker type — stateless; all the work happens in the `Visitor` impl.
#[derive(Default)]
pub struct GroupToInvoke;

impl Named for GroupToInvoke {
    fn name() -> &'static str {
        "group2invoke"
    }
    fn description() -> &'static str {
        // Fixed typo in the user-facing description ("covert" -> "convert").
        "convert groups that structurally invoke one component into invoke statements"
    }
}
/// Construct an [ir::Invoke] from an [ir::Group] that has been validated by this pass.
fn construct_invoke(
    assigns: &[ir::Assignment],
    comp: RRC<ir::Cell>,
) -> ir::Control {
    let mut inputs = Vec::new();
    let mut outputs = Vec::new();
    // True when `port` belongs to the invoked cell `comp`.
    let cell_is_parent = |port: &ir::Port| -> bool {
        if let ir::PortParent::Cell(cell_wref) = &port.parent {
            Rc::ptr_eq(&cell_wref.upgrade(), &comp)
        } else {
            false
        }
    };
    for assign in assigns {
        // If the cell's port is being used as a source, add the dst to
        // outputs
        // (@done is excluded: it drives the invoke's control flow, not data).
        if cell_is_parent(&assign.src.borrow())
            && assign.src != comp.borrow().get_with_attr("done")
        {
            let name = assign.src.borrow().name.clone();
            outputs.push((name, Rc::clone(&assign.dst)));
        }
        // If the cell's port is being used as a dest, add the source to
        // inputs
        // (@go is excluded: the invoke asserts it implicitly).
        if cell_is_parent(&assign.dst.borrow())
            && assign.dst != comp.borrow().get_with_attr("go")
        {
            let name = assign.dst.borrow().name.clone();
            inputs.push((name, Rc::clone(&assign.src)));
        }
    }
    ir::Control::invoke(comp, inputs, outputs)
}
impl Visitor for GroupToInvoke {
    /// Checks whether the enabled group meets all four structural
    /// requirements (see the pass doc) and, if so, replaces the enable with
    /// an equivalent `invoke`.
    fn enable(
        &mut self,
        s: &mut ir::Enable,
        _comp: &mut ir::Component,
        _sigs: &ir::LibrarySignatures,
    ) -> VisResult {
        let group = s.group.borrow();
        // There should be exactly one component being written to in the
        // group.
        let mut writes =
            ReadWriteSet::write_set(&group.assignments).collect_vec();
        if writes.len() != 1 {
            return Ok(Action::Continue);
        }
        // Component must define a @go/@done interface
        let cell = writes.pop().unwrap();
        let maybe_go_port = cell.borrow().find_with_attr("go");
        let maybe_done_port = cell.borrow().find_with_attr("done");
        if maybe_go_port.is_none() || maybe_done_port.is_none() {
            return Ok(Action::Continue);
        }
        let go_port = maybe_go_port.unwrap();
        // Set once a `1'd1` write to @go has been seen; a second write to
        // @go then aborts the transformation.
        // NOTE(review): a non-constant write to @go never sets this flag, so
        // such a group is not rejected by this check — confirm intended.
        let mut go_multi_write = false;
        let done_port = maybe_done_port.unwrap();
        // Analogous tracking for reads of @done into the group's done port.
        let mut done_multi_write = false;
        for assign in &group.assignments {
            // All assignments should be unguaraded.
            if !assign.guard.is_true() {
                return Ok(Action::Continue);
            }
            // @go port should have exactly one write and the src should be 1.
            if assign.dst == go_port {
                if go_multi_write {
                    return Ok(Action::Continue);
                }
                if !go_multi_write && assign.src.borrow().is_constant(1, 1) {
                    go_multi_write = true;
                }
            }
            // @done port should have exactly one read and the dst should be
            // group's done signal.
            if assign.src == done_port {
                if done_multi_write {
                    return Ok(Action::Continue);
                }
                if !done_multi_write && assign.dst == group.get("done") {
                    done_multi_write = true;
                }
            }
        }
        Ok(Action::Change(construct_invoke(&group.assignments, cell)))
    }
}
| true |
b93dae2a656f75b8afab53af8cbad710419bc1ae
|
Rust
|
Sophie-Williams/GameRoom-Bot
|
/src/command.rs
|
UTF-8
| 836 | 3.09375 | 3 |
[] |
no_license
|
use std::string::String;
use discord::model::{Message, ChannelId, User};
/// A chat command parsed from a Discord message: the first whitespace token
/// is the command name, the rest are its arguments.
#[derive(Debug)]
pub struct Command {
    user: User,
    channel_id: ChannelId,
    command: String,
    args: Vec<String>,
}
impl Command {
    /// Splits the message content on whitespace; the first token becomes the
    /// command name and the remaining tokens its arguments.
    ///
    /// NOTE(review): `args.remove(0)` panics when the message content is
    /// empty or all whitespace — callers must guarantee a non-empty message,
    /// or this should be changed to return `Option<Command>`.
    pub fn parse(message: &Message) -> Command {
        // `map(String::from)` replaces the redundant closure `|s| String::from(s)`.
        let mut args: Vec<String> = message
            .content
            .split_whitespace()
            .map(String::from)
            .collect();
        Command {
            user: message.author.clone(),
            channel_id: message.channel_id.clone(),
            command: args.remove(0),
            // Field-init shorthand replaces `args: args`.
            args,
        }
    }
    /// The author of the message.
    pub fn user(&self) -> &User {
        &self.user
    }
    /// The channel the message was sent in.
    pub fn channel_id(&self) -> &ChannelId {
        &self.channel_id
    }
    /// The command name (first whitespace-separated token).
    pub fn command(&self) -> &str {
        &*self.command
    }
    /// The arguments following the command name.
    pub fn args(&self) -> &Vec<String> {
        &self.args
    }
}
| true |
4177bdbb65c162c1eac67135c784f12bd836fda9
|
Rust
|
tinaun/playbot_ng_serenity
|
/src/context.rs
|
UTF-8
| 3,842 | 3.046875 | 3 |
[] |
no_license
|
use serenity;
use serenity::model::{
channel::Message,
id::{ChannelId, UserId},
};
use threadpool::ThreadPool;
use regex::Regex;
use std::rc::Rc;
type SendFn = fn(&ThreadPool, ChannelId, &str) -> serenity::Result<()>;
/// Everything a command handler needs to know about one incoming message.
#[derive(Clone)]
pub struct Context<'a> {
    // Message text with any bot-address prefix already stripped.
    body: &'a str,
    is_directly_addressed: bool,
    send_fn: SendFn,
    source: UserId,
    source_nickname: &'a str,
    target: ChannelId,
    client: &'a Message,
    pool: &'a ThreadPool,
    current_nickname: Rc<String>,
}
impl<'a> Context<'a> {
    /// Builds a message context, detecting whether the bot was directly
    /// addressed — either via a leading `botname:`/`botname,` prefix or via
    /// an `<@id>` mention — and stripping that prefix from `body`.
    pub fn new(pool: &'a ThreadPool, message: &'a Message) -> Option<Self> {
        lazy_static! {
            static ref MENTION: Regex = Regex::new(r"<@[0-9]*>").unwrap();
        }
        let mut body = &message.content[..];
        let id = serenity::CACHE.read().user.id;
        let current_nickname = Rc::new(serenity::CACHE.read().user.name.to_owned());
        let source_nickname = &message.author.name;
        let source = message.author.id;
        let target = message.channel_id;
        let is_directly_addressed = {
            // Case 1: "botname: <rest>" / "botname, <rest>" prefix.
            if body.starts_with(current_nickname.as_str()) {
                let new_body = body[current_nickname.len()..].trim_left();
                let has_separator = new_body.starts_with(":") || new_body.starts_with(",");
                if has_separator {
                    body = new_body[1..].trim_left();
                }
                has_separator
            } else {
                // Case 2: explicit @mention of the bot.
                let mentioned = message.mentions_user_id(id);
                if mentioned {
                    // unwrap: mentions_user_id guarantees a mention token is
                    // present, so the regex must match — presumably; verify.
                    let mention = MENTION
                        .captures(body)
                        .and_then(|cap| cap.get(0))
                        .unwrap();
                    body = body[mention.end()..].trim_left();
                }
                mentioned
            }
        };
        // Plain `say` into the originating channel; the pool is unused here.
        let send_fn: SendFn = |_pool, channel_id, msg| { channel_id.say(msg).map(|_| ()) };
        Some(Self {
            client: message,
            pool,
            body,
            send_fn,
            source,
            source_nickname,
            target,
            is_directly_addressed,
            current_nickname
        })
    }
pub fn body(&self) -> &'a str {
self.body
}
/// Wether the message was aimed directetly at the bot,
/// either via private message or by prefixing a channel message with
/// the bot's name, followed by ',' or ':'.
pub fn is_directly_addressed(&self) -> bool {
self.is_directly_addressed
}
pub fn is_ctcp(&self) -> bool {
false
}
pub fn reply<S: AsRef<str>>(&self, message: S) {
let message = message.as_ref();
eprintln!("Replying: {:?}", message);
for line in message.lines() {
if line.len() > 2000 {
let _ = (self.send_fn)(self.pool, self.target, "<<<message too long for irc>>>");
continue;
}
let _ = (self.send_fn)(self.pool, self.target, line);
}
}
pub fn source(&self) -> UserId {
self.source
}
pub fn source_nickname(&self) -> &'a str {
self.source_nickname
}
pub fn current_nickname(&self) -> Rc<String> {
self.current_nickname.clone()
}
pub fn inline_contexts<'b>(&'b self) -> impl Iterator<Item = Context<'a>> + 'b {
lazy_static! {
static ref INLINE_CMD: Regex = Regex::new(r"\{(.*?)}").unwrap();
}
let body = if self.is_directly_addressed() { "" } else { self.body };
let contexts = INLINE_CMD
.captures_iter(body)
.flat_map(|caps| caps.get(1))
.map(move |body| Context {
body: body.as_str(),
.. self.clone()
});
Box::new(contexts)
}
}
| true |
dee11345a693a0d28811763657a85ef4b000e0b2
|
Rust
|
Larusso/unity-version-manager
|
/uvm_core/src/unity/version/mod.rs
|
UTF-8
| 24,312 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::unity::Installation;
use log::{debug, info};
use regex::Regex;
use semver;
use serde::{self, Deserialize, Deserializer, Serialize, Serializer};
use std::cmp::Ordering;
use std::convert::{AsMut, AsRef, From, TryFrom};
use std::fmt;
use std::path::{Path, PathBuf};
use std::result;
use std::str::FromStr;
mod error;
mod hash;
pub use error::{Result, VersionError};
pub mod manifest;
pub mod module;
use crate::sys::unity::version as version_impl;
pub use self::hash::all_versions;
use self::hash::UnityHashError;
pub use self::version_impl::read_version_from_path;
/// Release channel of a Unity version, ordered by maturity:
/// `Alpha < Beta < Patch < Final` (variant order drives the derived `Ord`).
#[derive(PartialEq, Eq, Ord, Hash, Debug, Clone, Copy, Deserialize)]
pub enum VersionType {
    Alpha,
    Beta,
    Patch,
    Final,
}
/// Delegates to the derived `Ord` so the two orderings can never diverge.
impl PartialOrd for VersionType {
    fn partial_cmp(&self, other: &VersionType) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
#[derive(Eq, Debug, Clone, Hash, PartialOrd)]
pub struct Version {
base: semver::Version,
release_type: VersionType,
revision: u64,
hash: Option<String>,
}
impl Ord for Version {
    /// Orders by base semver, then release channel, then revision; the
    /// optional build hash intentionally plays no part in the ordering.
    fn cmp(&self, other: &Version) -> Ordering {
        (&self.base, self.release_type, self.revision)
            .cmp(&(&other.base, other.release_type, other.revision))
    }
}
impl Serialize for Version {
    /// Serializes as the canonical version string, e.g. `"2018.2.1f2"`.
    fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
impl<'de> Deserialize<'de> for Version {
fn deserialize<D>(deserializer: D) -> result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Version::from_str(&s).map_err(serde::de::Error::custom)
}
}
impl Version {
    /// Builds a version from its components, with no build hash.
    pub fn new(
        major: u64,
        minor: u64,
        patch: u64,
        release_type: VersionType,
        revision: u64,
    ) -> Version {
        let base = semver::Version::new(major, minor, patch);
        Version {
            base,
            release_type,
            revision,
            hash: None,
        }
    }
    /// Reads the version of the Unity installation at `path`
    /// (platform-specific lookup).
    pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Version> {
        version_impl::read_version_from_path(path)
    }
    /// Shorthand for an alpha (`a`) version.
    pub fn a(major: u64, minor: u64, patch: u64, revision: u64) -> Version {
        let base = semver::Version::new(major, minor, patch);
        Version {
            base,
            release_type: VersionType::Alpha,
            revision,
            hash: None,
        }
    }
    /// Shorthand for a beta (`b`) version.
    pub fn b(major: u64, minor: u64, patch: u64, revision: u64) -> Version {
        let base = semver::Version::new(major, minor, patch);
        Version {
            base,
            release_type: VersionType::Beta,
            revision,
            hash: None,
        }
    }
    /// Shorthand for a patch (`p`) version.
    pub fn p(major: u64, minor: u64, patch: u64, revision: u64) -> Version {
        let base = semver::Version::new(major, minor, patch);
        Version {
            base,
            release_type: VersionType::Patch,
            revision,
            hash: None,
        }
    }
    /// Shorthand for a final (`f`) version.
    pub fn f(major: u64, minor: u64, patch: u64, revision: u64) -> Version {
        let base = semver::Version::new(major, minor, patch);
        Version {
            base,
            release_type: VersionType::Final,
            revision,
            hash: None,
        }
    }
    /// Release channel of this version.
    pub fn release_type(&self) -> &VersionType {
        &self.release_type
    }
    /// Returns the stored build hash, falling back to a lookup in the known
    /// version/hash table when this instance does not carry one.
    pub fn version_hash(&self) -> Result<String> {
        self.hash
            .as_ref()
            .map(|h| h.to_owned())
            .ok_or_else(|| VersionError::HashMissing {
                source: UnityHashError::Other,
                version: self.to_string(),
            })
            .or_else(|_err| {
                // No hash stored: consult the bundled hash table.
                hash::hash_for_version(self).map_err(|source| VersionError::HashMissing {
                    source,
                    version: self.to_string(),
                })
            })
    }
    pub fn major(&self) -> u64 {
        self.base.major
    }
    pub fn minor(&self) -> u64 {
        self.base.minor
    }
    pub fn patch(&self) -> u64 {
        self.base.patch
    }
    pub fn revision(&self) -> u64 {
        self.revision
    }
    /// Scans a Unity binary for an embedded version string by running the
    /// external `strings` utility over it and parsing its output.
    ///
    /// # Errors
    ///
    /// Fails when `strings` cannot be spawned, exits unsuccessfully, or its
    /// output contains no parseable version.
    #[cfg(unix)]
    pub fn find_version_in_file<P: AsRef<Path>>(path: P) -> Result<Version> {
        use std::process::{Command, Stdio};
        let path = path.as_ref();
        debug!("find unity version in Unity executable {}", path.display())
impl PartialEq for Version {
    /// Two versions are equal when base/channel/revision match; build hashes
    /// are only compared when *both* sides carry one (pinned by the tests).
    ///
    /// NOTE(review): this relation is not transitive (`x/aaa == x == x/bbb`
    /// yet `x/aaa != x/bbb`) and can disagree with the derived `Hash`, which
    /// always includes the hash field — beware using `Version` as a
    /// `HashMap`/`HashSet` key.
    fn eq(&self, other: &Self) -> bool {
        let eq = self.base == other.base && self.release_type == other.release_type && self.revision == other.revision;
        if self.hash.is_some() && other.hash.is_some() {
            return eq && self.hash == other.hash
        }
        eq
    }
}
impl From<(u64, u64, u64, u64)> for Version {
    /// Builds a final (`f`) version from `(major, minor, patch, revision)`.
    fn from((major, minor, patch, revision): (u64, u64, u64, u64)) -> Version {
        Version::f(major, minor, patch, revision)
    }
}
impl TryFrom<PathBuf> for Version {
    type Error = VersionError;
    /// Reads the version of the Unity installation located at `path`.
    fn try_from(path: PathBuf) -> Result<Self> {
        Version::from_path(path)
    }
}
impl TryFrom<&Path> for Version {
    type Error = VersionError;
    /// Reads the version of the Unity installation located at `path`.
    fn try_from(path: &Path) -> Result<Self> {
        Version::from_path(path)
    }
}
impl fmt::Display for VersionType {
    /// `{}` prints the single-letter tag (`f`/`p`/`b`/`a`); the alternate
    /// form `{:#}` prints the full word (`final`/`patch`/`beta`/`alpha`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let text = match (*self, f.alternate()) {
            (VersionType::Final, true) => "final",
            (VersionType::Patch, true) => "patch",
            (VersionType::Beta, true) => "beta",
            (VersionType::Alpha, true) => "alpha",
            (VersionType::Final, false) => "f",
            (VersionType::Patch, false) => "p",
            (VersionType::Beta, false) => "b",
            (VersionType::Alpha, false) => "a",
        };
        f.write_str(text)
    }
}
impl Default for VersionType {
    /// `Final` is the default release channel.
    fn default() -> VersionType {
        VersionType::Final
    }
}
impl fmt::Display for Version {
    /// Formats as the canonical Unity string, e.g. `2018.2.1f2` (the build
    /// hash, if any, is not printed).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `release_type` implements Display; formatting it directly avoids
        // the intermediate String allocated by the previous `.to_string()`.
        write!(
            f,
            "{}{}{}",
            self.base,
            self.release_type,
            self.revision
        )
    }
}
/// Identity `AsRef`, so APIs taking `impl AsRef<Version>` accept a `Version`.
impl AsRef<Version> for Version {
    fn as_ref(&self) -> &Self {
        self
    }
}
/// Identity `AsMut`, mirroring the `AsRef` impl above it in spirit.
impl AsMut<Version> for Version {
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
impl FromStr for Version {
    type Err = VersionError;
    /// Parses strings like `2018.2.1f2`, optionally followed by a
    /// 12-character build hash in either ` (hash)` (ProjectSettings style)
    /// or `/hash` (Unity Hub style) form. Surrounding text is tolerated: the
    /// first match anywhere in `s` wins.
    fn from_str(s: &str) -> Result<Self> {
        // NOTE(review): the regex is recompiled on every call; caching it in
        // a static would avoid that cost on hot paths.
        let version_pattern =
            Regex::new(r"([0-9]{1,4})\.([0-9]{1,4})\.([0-9]{1,4})(f|p|b|a)([0-9]{1,4})( \(([a-z0-9]{12})\)|/([a-z0-9]{12}))?").unwrap();
        match version_pattern.captures(s) {
            Some(caps) => {
                // The numeric `unwrap`s cannot fail: each group is 1-4 digits.
                let major: u64 = caps.get(1).map_or("0", |m| m.as_str()).parse().unwrap();
                let minor: u64 = caps.get(2).map_or("0", |m| m.as_str()).parse().unwrap();
                let patch: u64 = caps.get(3).map_or("0", |m| m.as_str()).parse().unwrap();
                // Group 4 only matches one of the four tag letters, so the
                // `release_type.unwrap()` below cannot panic.
                let release_type = match caps.get(4).map_or("", |m| m.as_str()) {
                    "f" => Some(VersionType::Final),
                    "p" => Some(VersionType::Patch),
                    "b" => Some(VersionType::Beta),
                    "a" => Some(VersionType::Alpha),
                    _ => None,
                };
                let revision: u64 = caps.get(5).map_or("0", |m| m.as_str()).parse().unwrap();
                // Group 7 is the parenthesized hash form, group 8 the slash form.
                let hash = caps.get(7).or(caps.get(8)).map(|m| m.as_str().to_owned());
                let base = semver::Version::new(major, minor, patch);
                Ok(Version {
                    base,
                    revision,
                    release_type: release_type.unwrap(),
                    hash: hash,
                })
            }
            None => Err(VersionError::ParsingFailed(s.to_string())),
        }
    }
}
impl FromStr for VersionType {
    type Err = VersionError;
    /// Accepts both the single-letter tag (`"f"`) and the full word
    /// (`"final"`) for each release channel.
    fn from_str(s: &str) -> Result<Self> {
        match s {
            "f" | "final" => Ok(VersionType::Final),
            "p" | "patch" => Ok(VersionType::Patch),
            "b" | "beta" => Ok(VersionType::Beta),
            "a" | "alpha" => Ok(VersionType::Alpha),
            other => Err(VersionError::VersionTypeParsingFailed(other.to_string())),
        }
    }
}
impl From<Installation> for Version {
    /// Extracts the version of an installed Unity editor, consuming it.
    fn from(item: Installation) -> Self {
        item.version_owned()
    }
}
/// Returns the highest version among `versions` that satisfies `version_req`.
///
/// Versions whose channel is *below* `release_type` are compared including a
/// synthesized prerelease tag (e.g. `b.3`), so a plain requirement will not
/// match them; versions at or above `release_type` are compared by their bare
/// `major.minor.patch`.
///
/// # Errors
///
/// `VersionError::NoMatch` when no candidate satisfies the requirement.
pub fn fetch_matching_version<I: Iterator<Item = Version>>(
    versions: I,
    version_req: semver::VersionReq,
    release_type: VersionType,
) -> Result<Version> {
    versions
        .filter(|version| {
            let semver_version = if version.release_type() < &release_type {
                debug!(
                    "version {} release type is smaller than specified type {:#}",
                    version, release_type
                );
                // Attach "<channel>.<revision>" as a prerelease tag so semver
                // matching treats this as a pre-release of its base version.
                let mut semver_version = version.base().clone();
                semver_version.pre = semver::Prerelease::new(&format!(
                    "{}.{}",
                    version.release_type, version.revision
                ))
                .unwrap();
                semver_version
            } else {
                let b = version.base().clone();
                debug!(
                    "use base semver version {} of {} for comparison",
                    b, version
                );
                b
            };
            let is_match = version_req.matches(&semver_version);
            if is_match {
                info!("version {} is a match", version)
#[cfg(test)]
mod tests {
use super::*;
macro_rules! invalid_version_input {
($($name:ident: $input:expr),*) => {
$(
#[test]
fn $name() {
let version_string = $input;
let version = Version::from_str(version_string);
assert!(version.is_err(), "invalid input returns None")
}
)*
};
}
macro_rules! valid_version_input {
($($name:ident: $input:expr),*) => {
$(
#[test]
fn $name() {
let version_string = $input;
let version = Version::from_str(version_string);
assert!(version.is_ok(), "valid input returns a version")
}
)*
};
}
invalid_version_input! {
when_version_is_empty: "dsd",
when_version_is_a_random_string: "sdfrersdfgsdf",
when_version_is_a_short_version: "1.2",
when_version_is_semver: "1.2.3",
when_version_contains_unknown_release_type: "1.2.3g2"
}
valid_version_input! {
when_version_has_single_digits: "1.2.3f4",
when_version_has_long_digits: "0.0.0f43",
when_version_has_only_zero_digits: "0.0.0f0",
when_version_has_optional_hash_project_settings_style: "2020.3.38f1 (8f5fde82e2dc)",
when_version_has_optional_hash_unity_hub_style: "2020.3.38f1/8f5fde82e2dc"
}
#[test]
fn parse_version_string_with_valid_input() {
let version_string = "1.2.3f4";
let version = Version::from_str(version_string);
assert!(version.is_ok(), "valid input returns a version")
}
#[test]
fn splits_version_string_into_components() {
let version_string = "1.2.3f4";
let version = Version::from_str(version_string).ok().unwrap();
assert!(version.base.major == 1, "parse correct major component");
assert!(version.base.minor == 2, "parse correct minor component");
assert!(version.base.patch == 3, "parse correct patch component");
assert_eq!(version.release_type, VersionType::Final);
assert!(version.revision == 4, "parse correct revision component");
assert!(version.hash.is_none(), "parse correct optional hash")
}
#[test]
fn splits_version_string_into_components_with_hash() {
let version_string = "1.2.3f4 (abcdefghijkm)";
let version = Version::from_str(version_string).ok().unwrap();
assert!(version.base.major == 1, "parse correct major component");
assert!(version.base.minor == 2, "parse correct minor component");
assert!(version.base.patch == 3, "parse correct patch component");
assert_eq!(version.release_type, VersionType::Final);
assert!(version.revision == 4, "parse correct revision component");
assert!(version.hash.unwrap() == "abcdefghijkm", "parse correct optional hash")
}
#[test]
fn splits_version_string_into_components_with_hash_unity_hub_style() {
let version_string = "1.2.3f4/abcdefghijkm";
let version = Version::from_str(version_string).ok().unwrap();
assert!(version.base.major == 1, "parse correct major component");
assert!(version.base.minor == 2, "parse correct minor component");
assert!(version.base.patch == 3, "parse correct patch component");
assert_eq!(version.release_type, VersionType::Final);
assert!(version.revision == 4, "parse correct revision component");
assert!(version.hash.unwrap() == "abcdefghijkm", "parse correct optional hash")
}
#[test]
fn orders_version_final_release_greater_than_patch() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.2.3p4").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn orders_version_patch_release_greater_than_beta() {
let version_a = Version::from_str("1.2.3p4").ok().unwrap();
let version_b = Version::from_str("1.2.3b4").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn orders_version_final_release_greater_than_beta() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.2.3b4").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn orders_version_all_equak() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.2.3f4").ok().unwrap();
assert_eq!(Ordering::Equal, version_a.cmp(&version_b));
}
#[test]
fn orders_version_major_smaller() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("0.2.3f4").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn orders_version_minor_smaller() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.1.3f4").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn orders_version_patch_smaller() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.2.2f4").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn orders_version_revision_smaller() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.2.3f3").ok().unwrap();
assert_eq!(Ordering::Greater, version_a.cmp(&version_b));
}
#[test]
fn fetch_hash_for_known_version() {
let version = Version::f(2017, 1, 0, 2);
assert_eq!(
version.version_hash().unwrap(),
String::from("66e9e4bfc850")
);
}
#[test]
fn compares_versions() {
let version_a = Version::from_str("1.2.3f4").ok().unwrap();
let version_b = Version::from_str("1.2.3f4").ok().unwrap();
assert_eq!(version_a, version_b, "testing version equality");
let version_c = Version::from_str("1.2.3f4").ok().unwrap();
let version_d = Version::from_str("1.2.3f5").ok().unwrap();
assert_ne!(version_c, version_d, "testing version nonequality");
let version_c = Version::from_str("1.2.3f4").ok().unwrap();
let version_d = Version::from_str("1.2.3f4/1234567890ab").ok().unwrap();
assert_eq!(version_c, version_d, "testing version equality when one version has hash other not");
let version_c = Version::from_str("1.2.3f4/0987654321ab").ok().unwrap();
let version_d = Version::from_str("1.2.3f4/1234567890ab").ok().unwrap();
assert_ne!(version_c, version_d, "testing version equality when one version hash is different");
}
#[cfg(unix)]
#[test]
fn reads_version_from_binary_file() {
use std::io::Write;
use tempfile::Builder;
let mut test_file = Builder::new()
.prefix("version_binary")
.rand_bytes(5)
.tempfile()
.unwrap();
let version = "2018.2.1f2";
let version_hash = "dft74dsds844";
//Some known result patterns
let test_value_1 = format!("Unity {}\n", version);
let test_value_2 = format!("{}_{}\n", version, version_hash);
let test_value_3 = format!("{} ({})\n", version, version_hash);
let test_value_4 = format!("Mozilla/5.0 (MacIntel; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 Safari/537.36 Unity/{} (unity3d.com;\n", version);
let test_value_5 = format!("Invalid serialized file version. File: \"%s\". Expected version: {}. Actual version: %s.\n", version);
let test_value_6 = format!(
"UnityPlayer/{} (UnityWebRequest/1.0, libcurl/7.52.0-DEV)\n",
version
);
let f = test_file.as_file_mut();
let random_bytes: Vec<u8> = (0..2048).map(|_| rand::random::<u8>()).collect();
f.write_all(&random_bytes).unwrap();
f.write_all(test_value_1.as_bytes()).unwrap();
f.write_all(&random_bytes).unwrap();
f.write_all(test_value_2.as_bytes()).unwrap();
f.write_all(&random_bytes).unwrap();
f.write_all(test_value_3.as_bytes()).unwrap();
f.write_all(&random_bytes).unwrap();
f.write_all(test_value_4.as_bytes()).unwrap();
f.write_all(&random_bytes).unwrap();
f.write_all(test_value_5.as_bytes()).unwrap();
f.write_all(&random_bytes).unwrap();
f.write_all(test_value_6.as_bytes()).unwrap();
f.write_all(&random_bytes).unwrap();
let v = Version::find_version_in_file(test_file.path()).unwrap();
assert_eq!(v, Version::f(2018, 2, 1, 2));
}
#[cfg(unix)]
#[test]
fn fails_to_read_version_from_binary_file_if_verion_can_not_be_found() {
use std::io::Write;
use tempfile::Builder;
let mut test_file = Builder::new()
.prefix("version_binary")
.rand_bytes(5)
.tempfile()
.unwrap();
let f = test_file.as_file_mut();
let random_bytes: Vec<u8> = (0..8000).map(|_| rand::random::<u8>()).collect();
f.write_all(&random_bytes).unwrap();
let v = Version::find_version_in_file(test_file.path());
assert!(v.is_err());
}
#[test]
fn fetch_hash_for_unknown_version_yields_none() {
let version = Version::f(2080, 2, 0, 2);
assert!(version.version_hash().is_err());
}
proptest! {
#[test]
fn doesnt_crash(ref s in "\\PC*") {
let _ = Version::from_str(s);
}
#[test]
fn parses_all_valid_versions(ref s in r"[0-9]{1,4}\.[0-9]{1,4}\.[0-9]{1,4}[fpb][0-9]{1,4}") {
Version::from_str(s).ok().unwrap();
}
#[test]
fn parses_version_back_to_original(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {
let v1 = Version {
base: semver::Version::new(major, minor, patch),
revision,
release_type: VersionType::Final,
hash: None
};
let v2 = Version::from_str(&format!("{:04}.{:04}.{:04}f{:04}", major, minor, patch, revision)).ok().unwrap();
prop_assert_eq!(v1, v2);
}
#[test]
fn create_version_from_tuple(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {
let v1 = Version {
base: semver::Version::new(major, minor, patch),
revision,
release_type: VersionType::Final,
hash: None
};
let v2:Version = (major, minor, patch, revision).into();
prop_assert_eq!(v1, v2);
}
#[test]
fn create_version_final_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {
let v1 = Version {
base: semver::Version::new(major, minor, patch),
revision,
release_type: VersionType::Final,
hash: None
};
let v2:Version = Version::f(major, minor, patch, revision);
prop_assert_eq!(v1, v2);
}
#[test]
fn create_version_beta_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {
let v1 = Version {
base: semver::Version::new(major, minor, patch),
revision,
release_type: VersionType::Beta,
hash: None
};
let v2:Version = Version::b(major, minor, patch, revision);
prop_assert_eq!(v1, v2);
}
#[test]
fn create_version_alpha_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {
let v1 = Version {
base: semver::Version::new(major, minor, patch),
revision,
release_type: VersionType::Alpha,
hash: None
};
let v2:Version = Version::a(major, minor, patch, revision);
prop_assert_eq!(v1, v2);
}
#[test]
fn create_version_patch_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {
let v1 = Version {
base: semver::Version::new(major, minor, patch),
revision,
release_type: VersionType::Patch,
hash: None
};
let v2:Version = Version::p(major, minor, patch, revision);
prop_assert_eq!(v1, v2);
}
}
}
| true |
3a3e6db3ec6b081684634f6365f9e1bffc59153d
|
Rust
|
thchittenden/rust-kernel
|
/src/interrupt/timer.rs
|
UTF-8
| 1,688 | 2.9375 | 3 |
[] |
no_license
|
#![allow(dead_code)] // Constants.
use util::asm;
// 8253/8254 PIT I/O ports: one data port per channel plus the mode/command
// register.
const TIMER_CHAN0: u16 = 0x0040;
const TIMER_CHAN1: u16 = 0x0041;
const TIMER_CHAN2: u16 = 0x0042;
const TIMER_COMM: u16 = 0x0043;
/// The timer frequency in hertz.
const TIMER_FREQ: u32 = 1_193_182;
/// The desired interrupt frequency in hertz.
const INT_FREQ: u32 = 1_000;
/// The timer divider (integer division; achieved rate is TIMER_FREQ / TIMER_DIV).
const TIMER_DIV: u32 = TIMER_FREQ / INT_FREQ;
/// x86 timer commands: bit fields of the PIT mode/command register
/// (counting format, counter mode, access order, target channel).
bitflags! {
    flags TimerCommand: u8 {
        const Binary = 0b0000_0000,
        const BCD    = 0b0000_0001,
        const Mode0  = 0b0000_0000, // Interrupt on terminal count.
        const Mode1  = 0b0000_0010, // Hardware one shot.
        const Mode2  = 0b0000_0100, // Rate generator.
        const Mode3  = 0b0000_0110, // Square wave.
        const Mode4  = 0b0000_1000, // Software strobe.
        const Mode5  = 0b0000_1010, // Hardware strobe.
        const LoOnly = 0b0001_0000, // Access low divider byte only.
        const HiOnly = 0b0010_0000, // Access high divider byte only.
        const LoHi   = 0b0011_0000, // Low byte then high byte.
        const Chan0  = 0b0000_0000,
        const Chan1  = 0b0100_0000,
        const Chan2  = 0b1000_0000,
    }
}
/// Initializes the timer and sets the default frequency (`INT_FREQ`, 1 kHz).
pub fn init_timer() {
    set_frequency(INT_FREQ);
}
/// Sets the frequency of the timer.
///
/// # Panics
///
/// Panics if the requested frequency cannot be set: when `req_freq` is 0
/// (division by zero), greater than `TIMER_FREQ` (divider would be 0), or so
/// low that the divider exceeds 16 bits.
pub fn set_frequency(req_freq: u32) {
    // Integer division: the achieved frequency is TIMER_FREQ / div, which may
    // differ slightly from `req_freq`.
    let div = TIMER_FREQ / req_freq;
    // The PIT divider is 16-bit, and the hardware treats a divider of 0 as
    // 65536; the previous code only checked the upper bound, so requesting a
    // frequency above TIMER_FREQ silently programmed the wrong rate.
    assert!(div >= 1 && div <= u16::max_value() as u32);
    let div_lo = getbyte!(div, 0);
    let div_hi = getbyte!(div, 1);
    // Square-wave mode on channel 0; divider written low byte then high byte.
    let command = (Binary | Mode3 | LoHi | Chan0).bits;
    asm::outb8(TIMER_COMM, command);
    asm::outb8(TIMER_CHAN0, div_lo);
    asm::outb8(TIMER_CHAN0, div_hi);
}
| true |
991a1ed94869dde2f068b53d42c6ba8b34ee28c3
|
Rust
|
sirxyzzy/ilbm
|
/src/lib.rs
|
UTF-8
| 4,464 | 2.84375 | 3 |
[] |
no_license
|
#[macro_use]
extern crate log;
pub mod iff;
mod bytes;
mod compression;
mod read;
use iff::ChunkId;
use thiserror::Error;
use std::path::Path;
/// Global settings when reading image files
pub struct ReadOptions {
    /// Decode pixel data in addition to headers — presumably; confirm in
    /// `read::read_file`.
    pub read_pixels: bool,
    /// Scale to the page size declared in the file — TODO confirm semantics
    /// against `read`.
    pub page_scale: bool,
}
/// Main entry point: reads and parses the ILBM/IFF file at `file` according
/// to `options`.
pub fn read_from_file<P: AsRef<Path>>(file: P, options: ReadOptions) -> Result<IlbmImage> {
    read::read_file(file, options)
}
/// Custom errors for the ilbm library.
#[derive(Error, Debug)]
pub enum IlbmError {
    #[error("invalid header (expected {expected:?}, found {actual:?})")]
    InvalidHeader {
        expected: String,
        actual: String,
    },
    #[error("invalid data: {0}")]
    InvalidData (
        String
    ),
    #[error("File does not contain image data")]
    NoImage,
    #[error("No planes, possibly a color map with no image data")]
    NoPlanes,
    #[error("File does not contain image header (FORM.BMHD)")]
    NoHeader,
    /// Palette lookup out of range: `index` requested from a color map that
    /// holds only `map_size` entries.
    #[error("Color map of map_size {map_size:?} has no entry for {index:?}")]
    NoMapEntry{ index: usize, map_size: usize},
    #[error("Unexpected end of image data")]
    NoData,
    #[error("{0} not supported")]
    NotSupported(String),
    /// Wrapped I/O error (auto-converted via `?` thanks to `#[from]`).
    #[error("IO Error")]
    Io {
        #[from]
        source: std::io::Error
    },
}
/// Crate-wide result alias over [`IlbmError`].
pub type Result<T> = std::result::Result<T,IlbmError>;
/// Masking technique declared in the image header; see `as_masking` for the
/// numeric encoding used in the file.
#[derive(Debug,Clone,Copy, PartialEq)]
pub enum Masking {
    NoMask,
    HasMask,
    HasTransparentColor,
    Lasso
}
impl Default for Masking {
    /// Images without masking information default to `NoMask`.
    fn default() -> Self { Masking::NoMask }
}
/// Maps the raw masking byte from the file header to [`Masking`]; unknown
/// values are logged and degraded to `NoMask` rather than failing the file.
fn as_masking(v: u8) -> Masking {
    match v {
        0 => Masking::NoMask,
        1 => Masking::HasMask,
        2 => Masking::HasTransparentColor,
        3 => Masking::Lasso,
        x => {
            error!("Masking value of {} unsupported, mapping to None", x);
            Masking::NoMask
        }
    }
}
/// Display mode, aka ModeID — an Amiga-specific value from the CAMG chunk.
/// Its full interpretation is complex, but only two bits matter here.
#[derive(Copy, Debug, Clone, Default)]
pub struct DisplayMode (u32);

impl DisplayMode {
    /// Wraps a raw ModeID value.
    pub fn new(mode_bits: u32) -> DisplayMode {
        DisplayMode(mode_bits)
    }

    /// A mode with only the HAM bit set.
    pub fn ham() -> DisplayMode {
        DisplayMode::new(0x800)
    }

    /// Hold-And-Modify mode (bit 0x800).
    pub fn is_ham(&self) -> bool {
        self.0 & 0x800 != 0
    }

    /// Extra-Half-Brite mode (bit 0x80).
    pub fn is_halfbrite(&self) -> bool {
        self.0 & 0x80 != 0
    }
}

impl std::fmt::Display for DisplayMode {
    /// Hex value, tagged with the special mode when one is set (HAM wins).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        let tag = if self.is_ham() {
            " HAM"
        } else if self.is_halfbrite() {
            " HALF"
        } else {
            ""
        };
        write!(f, "0x{:X}{}", self.0, tag)
    }
}
/// One 8-bit-per-channel color triple: (red, green, blue).
#[derive(Copy, Debug, Clone, Default)]
pub struct RgbValue (u8, u8, u8);
/// This is an amalgam of information drawn from
/// various chunks in the ILBM, mapped to more native
/// types such as usize for u16, and enums for masking
#[derive(Debug, Default)]
pub struct IlbmImage {
    /// Image dimensions in pixels.
    pub size: Size2D,
    /// Number of color map entries — TODO confirm against the reader.
    pub map_size: usize,
    /// Bit planes per pixel.
    pub planes: usize,
    pub masking: Masking,
    /// Whether the file declared its body data as compressed.
    pub compression: bool,
    pub display_mode: DisplayMode,
    pub dpi: Size2D,
    pub pixel_aspect: Size2D,
    pub transparent_color: usize, // Actually a color index
    pub page_size: Size2D,
    /// RGB data triples
    /// Left to right in row, then top to bottom
    /// so indexes look like y * width + x where
    /// y=0 is the top
    pub pixels: Vec<u8>
}
impl std::fmt::Display for IlbmImage {
    /// One-line human-readable summary of the metadata; pixel data omitted.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        let compressed = if self.compression { "Comp" } else { "" };
        write!(f, "{} dpi:{} p:{} {} {:?} map:{} mode:{} aspect:{} trans:{} page:{}",
            self.size, self.dpi, self.planes,
            compressed, self.masking, self.map_size, self.display_mode,
            self.pixel_aspect, self.transparent_color, self.page_size)
    }
}
/// A width/height pair measured in pixels (or dots, for DPI values).
#[derive(Debug, Copy, Clone, Default)]
pub struct Size2D (usize,usize);

impl Size2D {
    /// Horizontal extent.
    pub fn width(&self) -> usize {
        self.0
    }

    /// Vertical extent.
    pub fn height(&self) -> usize {
        self.1
    }
}

impl std::fmt::Display for Size2D {
    /// Renders as `WIDTHxHEIGHT`, e.g. `320x200`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        write!(f, "{}x{}", self.0, self.1)
    }
}
/// Indexed palette read from the file's color map chunk.
#[derive(Debug, Clone)]
struct ColorMap {
    /// Palette entries, indexed by pixel value.
    colors: Vec<RgbValue>
}
| true |
98e4d999ddc132ac5e4863dcddad3bb61ae8a932
|
Rust
|
crlf0710/modern-web
|
/mweb/src/classical/lexer.rs
|
UTF-8
| 61,619 | 3.234375 | 3 |
[] |
no_license
|
use crate::utils::U8SpanRef;
use thiserror::Error;
pub mod ascii_char {
    /// Coarse lexical class of a single input byte.
    #[derive(Copy, Clone, PartialEq)]
    enum AsciiCharCategory {
        Alphabetic,
        Digit,
        Symbol,
        InlineWhitespace,
        LineFeedWhitespace,
        Invalid,
    }

    /// Classifies one byte; every byte falls into exactly one category.
    fn ascii_char_category(ch: u8) -> AsciiCharCategory {
        match ch {
            0x09 | 0x0C | b' ' => AsciiCharCategory::InlineWhitespace,
            0x0A | 0x0D => AsciiCharCategory::LineFeedWhitespace,
            b'A'..=b'Z' | b'a'..=b'z' => AsciiCharCategory::Alphabetic,
            b'0'..=b'9' => AsciiCharCategory::Digit,
            // Remaining control bytes and everything past 7-bit ASCII.
            0x00..=0x1F | 0x7F..=0xFF => AsciiCharCategory::Invalid,
            _ => AsciiCharCategory::Symbol,
        }
    }

    /// True for bytes that may never appear in the input.
    #[allow(dead_code)]
    pub fn is_invalid_char(ch: u8) -> bool {
        match ascii_char_category(ch) {
            AsciiCharCategory::Invalid => true,
            _ => false,
        }
    }

    /// Horizontal (non-newline) whitespace: TAB, FF, space.
    pub fn is_inline_whitespace_char(ch: u8) -> bool {
        match ascii_char_category(ch) {
            AsciiCharCategory::InlineWhitespace => true,
            _ => false,
        }
    }

    /// Any whitespace, including CR and LF.
    pub fn is_whitespace_char(ch: u8) -> bool {
        match ascii_char_category(ch) {
            AsciiCharCategory::InlineWhitespace | AsciiCharCategory::LineFeedWhitespace => true,
            _ => false,
        }
    }

    /// ASCII letter or digit.
    #[allow(dead_code)]
    pub fn is_alphanumeric_char(ch: u8) -> bool {
        match ascii_char_category(ch) {
            AsciiCharCategory::Alphabetic | AsciiCharCategory::Digit => true,
            _ => false,
        }
    }

    /// ASCII digit.
    pub fn is_numeric_char(ch: u8) -> bool {
        match ascii_char_category(ch) {
            AsciiCharCategory::Digit => true,
            _ => false,
        }
    }

    /// First byte of an identifier: ASCII letter or underscore.
    pub fn is_id_start(ch: u8) -> bool {
        ch == b'_' || (b'A'..=b'Z').contains(&ch) || (b'a'..=b'z').contains(&ch)
    }

    /// Subsequent identifier byte: letter, digit, or underscore.
    pub fn is_id_continue(ch: u8) -> bool {
        is_id_start(ch) || (b'0'..=b'9').contains(&ch)
    }

    /// Printable symbol byte (not alphanumeric, not whitespace).
    pub fn is_punct_char(ch: u8) -> bool {
        match ascii_char_category(ch) {
            AsciiCharCategory::Symbol => true,
            _ => false,
        }
    }

    /// Digit in 0-7.
    pub fn is_octal_digit(ch: u8) -> bool {
        (b'0'..=b'7').contains(&ch)
    }

    /// Digit 0-9 or hex letter a-f / A-F.
    pub fn is_hex_digit(ch: u8) -> bool {
        (b'0'..=b'9').contains(&ch)
            || (b'A'..=b'F').contains(&ch)
            || (b'a'..=b'f').contains(&ch)
    }
}
pub mod ascii_str {
    use std::fmt::{self, Debug};
    use thiserror::Error;
    /// Byte string guaranteed by construction (see [`from_bytes`]) to hold
    /// only 7-bit ASCII.
    #[repr(transparent)]
    #[derive(PartialEq)]
    pub struct AsciiStr(pub [u8]);
    impl AsciiStr {
        /// If `self` ends with `bytes`, returns `(head, Some(tail))` split at
        /// the start of that suffix; otherwise `(self, None)`.
        pub fn try_split_ending_substr(&self, bytes: &Self) -> (&Self, Option<&Self>) {
            if (self.0).ends_with(&bytes.0) {
                let pos = self.0.len() - bytes.0.len();
                // SAFETY: `AsciiStr` is `#[repr(transparent)]` over `[u8]`,
                // so `(&[u8], Option<&[u8]>)` and
                // `(&AsciiStr, Option<&AsciiStr>)` share the same layout.
                unsafe { std::mem::transmute((&(self.0)[..pos], Some(&(self.0)[pos..]))) }
            } else {
                (self, None)
            }
        }
    }
    /// Error returned when input bytes are not 7-bit ASCII.
    #[derive(Error, Debug)]
    #[error("not 7-bit ascii string")]
    pub struct NotAsciiStrError;
    /// Validates that every byte is < 0x80 and reinterprets the slice.
    pub fn from_bytes(bytes: &[u8]) -> Result<&AsciiStr, NotAsciiStrError> {
        for &byte in bytes {
            if byte >= 0x80 {
                return Err(NotAsciiStrError);
            }
        }
        // SAFETY: all bytes were just checked to be < 0x80, and
        // `#[repr(transparent)]` makes `&[u8] -> &AsciiStr` layout-sound.
        unsafe { Ok(std::mem::transmute(bytes)) }
    }
    impl<'x> Debug for &'x AsciiStr {
        /// Debug-prints as a quoted string; the UTF-8 check cannot fail for
        /// valid ASCII, but any error is surfaced as `fmt::Error`.
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            let str = std::str::from_utf8(&self.0).map_err(|_| fmt::Error)?;
            write!(fmt, "{:?}", str).map_err(|_| fmt::Error)?;
            Ok(())
        }
    }
}
use bitflags::bitflags;
bitflags! {
    /// Bit mask over the lexer modes: each control code records in which
    /// modes it may appear and which modes it terminates.
    pub struct LexModeSet : u8 {
        const NOTHING = 0;
        const COMMENT = 0x1;
        const LIMBO = 0x2;
        const MODULE_NAME = 0x4;
        const STRING_LITERAL = 0x8;
        const PASCAL_TEXT = 0x10;
        const TEX_TEXT = 0x20;
        const DEFINITION_TEXT = 0x40;
        const INLINE_PASCAL_TEXT = 0x80;
    }
}
impl LexModeSet {
    // workaround for https://github.com/bitflags/bitflags/issues/180
    // (the generated `BitOr` impl is not `const`, so `|` cannot be used when
    // building the const tables below)
    const fn const_or(self, other: LexModeSet) -> Self {
        LexModeSet::from_bits_truncate(self.bits() | other.bits())
    }
    /// True when this mask includes the single mode `mode`.
    const fn contains_mode(&self, mode: LexMode) -> bool {
        (self.bits & mode.0) != 0
    }
}
/// A single lexer mode: exactly one bit of [`LexModeSet`].
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct LexMode(u8);
use std::fmt;
impl fmt::Debug for LexMode {
    /// Renders the mode as its human-readable name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mode_text = match *self {
            LexMode::LIMBO => "Limbo",
            LexMode::TEX_TEXT => "TeXText",
            LexMode::MODULE_NAME => "ModuleName",
            LexMode::PASCAL_TEXT => "PascalText",
            LexMode::COMMENT => "Comment",
            LexMode::STRING_LITERAL => "StrLiteral",
            LexMode::DEFINITION_TEXT => "DefinitionText",
            LexMode::INLINE_PASCAL_TEXT => "InlinePascalText",
            // `LexMode` values only come from the named constants, so any
            // other bit pattern is a logic error.
            _ => unreachable!(),
        };
        // `write!` already produces `fmt::Result`; the previous
        // `.map_err(|_| fmt::Error)? ... Ok(())` round-trip was a no-op.
        f.write_str(mode_text)
    }
}
impl LexMode {
    // Single-bit mode constants; each mirrors the `LexModeSet` flag of the
    // same name.
    pub const LIMBO: LexMode = LexMode(LexModeSet::LIMBO.bits);
    pub const TEX_TEXT: LexMode = LexMode(LexModeSet::TEX_TEXT.bits);
    pub const MODULE_NAME: LexMode = LexMode(LexModeSet::MODULE_NAME.bits);
    pub const PASCAL_TEXT: LexMode = LexMode(LexModeSet::PASCAL_TEXT.bits);
    pub const COMMENT: LexMode = LexMode(LexModeSet::COMMENT.bits);
    pub const STRING_LITERAL: LexMode = LexMode(LexModeSet::STRING_LITERAL.bits);
    pub const DEFINITION_TEXT: LexMode = LexMode(LexModeSet::DEFINITION_TEXT.bits);
    pub const INLINE_PASCAL_TEXT: LexMode = LexMode(LexModeSet::INLINE_PASCAL_TEXT.bits);
}
pub mod control_code {
use super::token::BoxedTokenList;
use super::LexModeSet;
#[derive(Copy, Clone)]
pub enum SpecialHandling {
None,
GroupTitle,
ModuleName,
MacroDefinition,
FormatDefinition,
OctalConst,
HexConst,
ControlTextUpToAtGT,
WarnAndIgnore, // occurred in xetex.web:9057
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ControlCodeKind {
EscapedAt,
DefineModule,
DefineStarredModule,
DefineMacro,
DefineFormat,
DefineProgram,
ModuleName,
OctalConst,
HexConst,
StringPoolChecksum,
MetaCommentBegin,
MetaCommentEnd,
ProgramAdjacent,
ForceIndex,
ForceIndexMono,
ForceIndexStyle9,
ForceHBox,
ForceVerbatim,
ForceEOL,
UnderlineFlag,
NoUnderlineFlag,
FormatThinSpace,
FormatLineBreak,
FormatSuggestLineBreak,
FormatLineBreakLarge,
FormatNoLineBreak,
FormatInvisibleSemicolon,
HiddenEndOfModuleName,
Ignored,
}
    /// A lexed control code together with its (optional) parameter tokens.
    #[derive(Debug, PartialEq)]
    pub struct ControlCode<'x> {
        pub kind: ControlCodeKind,
        pub param: Option<BoxedTokenList<'x>>,
    }
    /// Table row describing a single control code.
    pub struct ControlCodeInfoRecord {
        /// Bytes that select this code after `@` (any one of them).
        pub selector: &'static [u8],
        pub kind: ControlCodeKind,
        /// How the code's parameter text is lexed.
        pub special_handling: SpecialHandling,
        /// Modes in which this code terminates the current text run.
        pub terminating_modes: LexModeSet,
        /// Modes in which this code may legally appear.
        pub appliable_modes: LexModeSet,
    }
    /// Static description of every `@x` control code: which byte(s) select
    /// it, how its parameter is parsed, which modes it may appear in, and
    /// which modes it terminates. `get_control_code_info_record_for_selector`
    /// builds a 256-entry dispatch table from this list, so no two records
    /// may share a selector byte.
    pub const CONTROL_CODE_DATA: &'static [ControlCodeInfoRecord] = &[
        ControlCodeInfoRecord {
            selector: b"@",
            kind: ControlCodeKind::EscapedAt,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::COMMENT
                .const_or(LexModeSet::LIMBO)
                .const_or(LexModeSet::MODULE_NAME)
                .const_or(LexModeSet::PASCAL_TEXT)
                .const_or(LexModeSet::STRING_LITERAL)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b" \t\r\n",
            kind: ControlCodeKind::DefineModule,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::LIMBO
                .const_or(LexModeSet::PASCAL_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
            appliable_modes: LexModeSet::LIMBO
                .const_or(LexModeSet::PASCAL_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"*",
            kind: ControlCodeKind::DefineStarredModule,
            special_handling: SpecialHandling::GroupTitle,
            terminating_modes: LexModeSet::LIMBO
                .const_or(LexModeSet::PASCAL_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
            appliable_modes: LexModeSet::LIMBO
                .const_or(LexModeSet::PASCAL_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"dD",
            kind: ControlCodeKind::DefineMacro,
            special_handling: SpecialHandling::MacroDefinition,
            terminating_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"fF",
            kind: ControlCodeKind::DefineFormat,
            special_handling: SpecialHandling::FormatDefinition,
            terminating_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"pP",
            kind: ControlCodeKind::DefineProgram,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::DEFINITION_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"<",
            kind: ControlCodeKind::ModuleName,
            special_handling: SpecialHandling::ModuleName,
            terminating_modes: LexModeSet::TEX_TEXT.const_or(LexModeSet::DEFINITION_TEXT),
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"\'",
            kind: ControlCodeKind::OctalConst,
            special_handling: SpecialHandling::OctalConst,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"\"",
            kind: ControlCodeKind::HexConst,
            special_handling: SpecialHandling::HexConst,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT)
                .const_or(LexModeSet::COMMENT /*xetex.web:8641*/),
        },
        ControlCodeInfoRecord {
            selector: b"$",
            kind: ControlCodeKind::StringPoolChecksum,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"{",
            kind: ControlCodeKind::MetaCommentBegin,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"}",
            kind: ControlCodeKind::MetaCommentEnd,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"&",
            kind: ControlCodeKind::ProgramAdjacent,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"^",
            kind: ControlCodeKind::ForceIndex,
            special_handling: SpecialHandling::ControlTextUpToAtGT,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b".",
            kind: ControlCodeKind::ForceIndexMono,
            special_handling: SpecialHandling::ControlTextUpToAtGT,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b":",
            kind: ControlCodeKind::ForceIndexStyle9,
            special_handling: SpecialHandling::ControlTextUpToAtGT,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"t",
            kind: ControlCodeKind::ForceHBox,
            special_handling: SpecialHandling::ControlTextUpToAtGT,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"=",
            kind: ControlCodeKind::ForceVerbatim,
            special_handling: SpecialHandling::ControlTextUpToAtGT,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"\\",
            kind: ControlCodeKind::ForceEOL,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"!",
            kind: ControlCodeKind::UnderlineFlag,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"?",
            kind: ControlCodeKind::NoUnderlineFlag,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::TEX_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b",",
            kind: ControlCodeKind::FormatThinSpace,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"/",
            kind: ControlCodeKind::FormatLineBreak,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"|",
            kind: ControlCodeKind::FormatSuggestLineBreak,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"#",
            kind: ControlCodeKind::FormatLineBreakLarge,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"+",
            kind: ControlCodeKind::FormatNoLineBreak,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b";",
            kind: ControlCodeKind::FormatInvisibleSemicolon,
            special_handling: SpecialHandling::None,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT
                .const_or(LexModeSet::DEFINITION_TEXT)
                .const_or(LexModeSet::INLINE_PASCAL_TEXT),
        },
        ControlCodeInfoRecord {
            selector: b"z",
            kind: ControlCodeKind::Ignored,
            special_handling: SpecialHandling::WarnAndIgnore,
            terminating_modes: LexModeSet::NOTHING,
            appliable_modes: LexModeSet::PASCAL_TEXT,
        },
    ];
pub fn get_control_code_info_record_for_selector(
selector: u8,
) -> Option<&'static ControlCodeInfoRecord> {
use once_cell::sync::Lazy;
static CONTROL_CODE_TABLE: Lazy<[Option<&'static ControlCodeInfoRecord>; 256]> =
Lazy::new(|| {
let mut table = [None; 256];
for item in CONTROL_CODE_DATA.iter() {
for &ch in item.selector.iter() {
assert!(table[ch as usize].is_none());
table[ch as usize] = Some(item);
}
}
table
});
CONTROL_CODE_TABLE[selector as usize]
}
}
pub mod operator {
    /// Pascal operator kinds recognized by the lexer.
    #[derive(Clone, PartialEq, Debug)]
    pub enum Operator {
        Plus,
        Subtract,
        Dereference,
        Equal,
        NotEqual,
        GreaterThan,
        LessThan,
        GreaterEq,
        LessEq,
        Multiply,
        Divide,
        Assign,
    }
}
pub mod punctuation {
    use super::operator::Operator;
    /// Pascal punctuation tokens (operators plus structural symbols).
    #[derive(Clone, PartialEq, Debug)]
    pub enum Punctuation {
        Op(Operator),
        LParen,
        RParen,
        LBracket,
        RBracket,
        RangeUntil,
        WithType,
        ArgumentSeparator,
        EndOfStatement,
        DotOrEndOfProgram,
        DefineAs,
        Dollar,
        Backslash, /*xetex.web:24446*/
    }
    /// One literal-to-punctuation mapping.
    pub struct PunctuationInfo {
        pub literal: &'static [u8],
        pub kind: Punctuation,
    }
    /// Punctuation lookup table. `lex_punct` scans it in order and takes the
    /// first prefix match, so multi-byte literals (`..`, `:=`, `<=`, ...)
    /// must precede their single-byte prefixes (`.`, `:`, `<`, ...).
    pub const PUNCTUATION_TABLE: &[PunctuationInfo] = &[
        PunctuationInfo {
            literal: b"..",
            kind: Punctuation::RangeUntil,
        },
        PunctuationInfo {
            literal: b":=",
            kind: Punctuation::Op(Operator::Assign),
        },
        PunctuationInfo {
            literal: b"<>",
            kind: Punctuation::Op(Operator::NotEqual),
        },
        PunctuationInfo {
            literal: b"==",
            kind: Punctuation::DefineAs,
        },
        PunctuationInfo {
            literal: b">=",
            kind: Punctuation::Op(Operator::GreaterEq),
        },
        PunctuationInfo {
            literal: b"<=",
            kind: Punctuation::Op(Operator::LessEq),
        },
        PunctuationInfo {
            literal: b">",
            kind: Punctuation::Op(Operator::GreaterThan),
        },
        PunctuationInfo {
            literal: b"<",
            kind: Punctuation::Op(Operator::LessThan),
        },
        PunctuationInfo {
            literal: b":",
            kind: Punctuation::WithType,
        },
        PunctuationInfo {
            literal: b"^",
            kind: Punctuation::Op(Operator::Dereference),
        },
        PunctuationInfo {
            literal: b"(",
            kind: Punctuation::LParen,
        },
        PunctuationInfo {
            literal: b")",
            kind: Punctuation::RParen,
        },
        PunctuationInfo {
            literal: b"[",
            kind: Punctuation::LBracket,
        },
        PunctuationInfo {
            literal: b"]",
            kind: Punctuation::RBracket,
        },
        PunctuationInfo {
            literal: b",",
            kind: Punctuation::ArgumentSeparator,
        },
        PunctuationInfo {
            literal: b";",
            kind: Punctuation::EndOfStatement,
        },
        PunctuationInfo {
            literal: b".",
            kind: Punctuation::DotOrEndOfProgram,
        },
        PunctuationInfo {
            literal: b"$",
            kind: Punctuation::Dollar,
        },
        PunctuationInfo {
            literal: b"=",
            kind: Punctuation::Op(Operator::Equal),
        },
        PunctuationInfo {
            literal: b"+",
            kind: Punctuation::Op(Operator::Plus),
        },
        PunctuationInfo {
            literal: b"-",
            kind: Punctuation::Op(Operator::Subtract),
        },
        PunctuationInfo {
            literal: b"*",
            kind: Punctuation::Op(Operator::Multiply),
        },
        PunctuationInfo {
            literal: b"/",
            kind: Punctuation::Op(Operator::Divide),
        },
        PunctuationInfo {
            literal: b"\\",
            kind: Punctuation::Backslash,
        },
    ];
}
pub mod literal {
    use super::ascii_str::AsciiStr;
    use super::token::BoxedTokenList;
    /// A literal value appearing in Pascal text.
    #[derive(Debug, PartialEq)]
    pub enum Literal<'x> {
        /// Integer literal (decimal, or `@'` octal / `@"` hex).
        IntegerU32(u32),
        /// Real (floating-point) literal.
        RealF64(f64),
        /// A `'...'` string literal; content only, quotes stripped.
        StringLiteral(&'x AsciiStr),
        /// A `"..."` preprocessed (string-pool) literal, stored as tokens.
        PreprocessedStringLiteral(BoxedTokenList<'x>),
    }
}
/// Errors produced while lexing a WEB source file. The `#[error]` strings
/// are the user-facing messages rendered via `thiserror`.
#[derive(Error, Debug)]
pub enum LexError {
    #[error("Unexpected EOF reached before proper finish")]
    UnexpectedEOF,
    #[error("Not 7-bit ascii byte occurred")]
    Not7BitAscii(#[from] ascii_str::NotAsciiStrError),
    #[error("Invalid control code")]
    InvalidControlCodeChar { control_code: u8, pos: usize },
    #[error("Unrecognized symbol starting with '{0}'")]
    UnrecognizedPunctuation(char),
    #[error("Control code used where it's not usable")]
    ControlCodeInNonApplicableMode,
    #[error("Control code character '{0}' used where it's not usable")]
    ControlCodeCharInNonApplicableMode(char),
    #[error("Integer literal overflow: {0} with radix {1}")]
    IntegerLiteralOverflow(String, u32),
    #[error("Float literal lex error: {0}")]
    FloatLiteralLexError(String),
    #[error("Numeric literal not properly finished")]
    NumericLiteralNotProperlyFinished,
    #[error("Control text not properly finished with @>")]
    ControlTextNotProperlyFinished,
    #[error("Group title not properly finished with .")]
    GroupTitleNotProperlyFinished,
    #[error("Inline program fragment not properly finished")]
    InlineProgFragmentNotProperlyFinished,
    #[error("Comment not properly finished with }}")]
    CommentNotProperlyFinished,
    #[error("Comment nesting too deep")]
    CommentNestingTooDeep,
    #[error("String literal not properly finished with \'")]
    StringLiteralNotProperlyFinished,
    #[error("String literal not properly finished with \"")]
    PreprocessedStringLiteralNotProperlyFinished,
    #[error("Any lex error!")]
    AnyLexError,
}
/// Which kind of item a terminating control code opens next.
pub enum LexControlFlowNewItem {
    Module,
    Definition,
    ProgramText,
}
/// Directive telling the caller of `lex_token` how to proceed.
pub enum LexControlFlow<'x> {
    /// Keep lexing from the returned span in the same mode.
    Continue(U8SpanRef<'x>),
    /// Input is exhausted; the returned span is empty.
    Finish(U8SpanRef<'x>),
    /// A terminating control code started a new item in a new mode.
    StartNew(LexControlFlowNewItem, LexMode, U8SpanRef<'x>),
    /// `...@>` was seen in an inline program fragment of a module name.
    ModuleNameInlineProgAbort(U8SpanRef<'x>),
}
pub mod token {
use super::ascii_char::{is_hex_digit, is_inline_whitespace_char, is_octal_digit};
use super::ascii_str::{self, AsciiStr};
use super::control_code::ControlCode;
use super::literal::Literal;
use super::punctuation::Punctuation;
use super::{LexControlFlow, LexControlFlowNewItem, LexError, LexMode};
use crate::utils::U8SpanRef;
    /// A single lexed WEB token.
    #[derive(Debug, PartialEq)]
    pub enum Token<'x> {
        /// An `@x` control code, possibly carrying parameter tokens.
        CtrlCode(ControlCode<'x>),
        /// A run of whitespace in Pascal text.
        WS,
        /// `#`, the macro parameter marker.
        MacroParamMark,
        /// A Pascal identifier or keyword.
        IdentOrKw(&'x AsciiStr),
        Punct(Punctuation),
        Literal(Literal<'x>),
        /// A `{...}` Pascal comment whose body was re-lexed into tokens.
        Comment(BoxedTokenList<'x>),
        /// A `|...|` inline program fragment inside TeX text.
        InlineProgramFragment(BoxedTokenList<'x>),
        /// Raw text (limbo/TeX text, control text, comment fragments, ...).
        TextFragment(&'x AsciiStr),
        /// Marker for `...@>` aborting an inline fragment in a module name.
        ModuleNameInlineProgAbort,
    }
    /// A sequence of lexed tokens.
    pub type TokenList<'x> = Vec<Token<'x>>;
    /// A heap-allocated token list, used for parameters nested inside tokens.
    pub type BoxedTokenList<'x> = Box<Vec<Token<'x>>>;
fn continue_or_finish(l: U8SpanRef<'_>) -> LexControlFlow<'_> {
if l.is_empty() {
LexControlFlow::Finish(l)
} else {
LexControlFlow::Continue(l)
}
}
fn switch_mode<'x>(control_code: &ControlCode<'x>, l: U8SpanRef<'x>) -> LexControlFlow<'x> {
use super::control_code::ControlCodeKind;
match control_code.kind {
ControlCodeKind::DefineModule | ControlCodeKind::DefineStarredModule => {
LexControlFlow::StartNew(LexControlFlowNewItem::Module, LexMode::TEX_TEXT, l)
}
ControlCodeKind::DefineMacro | ControlCodeKind::DefineFormat => {
LexControlFlow::StartNew(
LexControlFlowNewItem::Definition,
LexMode::DEFINITION_TEXT,
l,
)
}
ControlCodeKind::DefineProgram | ControlCodeKind::ModuleName => {
LexControlFlow::StartNew(
LexControlFlowNewItem::ProgramText,
LexMode::PASCAL_TEXT,
l,
)
}
_ => unreachable! {},
}
}
pub const CONTROL_CODE_PREFIX: u8 = b'@';
pub const INLINE_PROGRAM_FRAGMENT: u8 = b'|';
pub const ESCAPE_CHARACTER: u8 = b'\\';
pub const START_OF_COMMENT: u8 = b'{';
pub const END_OF_COMMENT: u8 = b'}';
pub const LINE_FEED: u8 = b'\n';
pub const CARRIAGE_RETURN: u8 = b'\r';
pub const SIMPLE_ESCAPED_ATAIL: &'static [u8] = b"@@";
pub const END_OF_CONTROL_TEXT: &'static [u8] = b"@>";
pub const START_OF_MACRO_DEFINITION: &'static [u8] = b"@d";
pub const START_OF_FORMAT_DEFINITION: &'static [u8] = b"@f";
pub const MODULE_NAME_INLINE_PROGFRAG_ABORT: &'static [u8] = b"...@>";
pub fn lex_u32_literal_with_radix(l: &[u8], radix: usize) -> Result<Literal, LexError> {
use std::str::from_utf8;
let str = from_utf8(l).unwrap();
if let Ok(v) = u32::from_str_radix(str, radix as u32) {
Ok(Literal::IntegerU32(v))
} else {
Err(LexError::IntegerLiteralOverflow(
str.to_owned(),
radix as u32,
))
}
}
pub fn lex_f64_literal(l: &[u8]) -> Result<Literal, LexError> {
use std::str::{from_utf8, FromStr};
let str = from_utf8(l).unwrap();
if let Ok(v) = f64::from_str(str) {
Ok(Literal::RealF64(v))
} else {
Err(LexError::FloatLiteralLexError(str.to_owned()))
}
}
pub fn lex_numeric_literal(l: U8SpanRef<'_>) -> Result<(Literal, U8SpanRef<'_>), LexError> {
use super::ascii_char::is_numeric_char;
let count_int = l
.bytes()
.iter()
.copied()
.take_while(|&ch| is_numeric_char(ch))
.count();
let has_dot = count_int > 0 && l.bytes()[count_int..].starts_with(b".");
let count_fraction = if has_dot {
l.bytes()[count_int + 1..]
.iter()
.copied()
.take_while(|&ch| is_numeric_char(ch))
.count()
} else {
0
};
if has_dot && count_fraction > 0 {
let (numeric, rest) = l.split_at(count_int + 1 + count_fraction);
let literal = lex_f64_literal(numeric)?;
Ok((literal, rest))
} else if count_int > 0 {
let (numeric, rest) = l.split_at(count_int);
let literal = lex_u32_literal_with_radix(numeric, 10)?;
Ok((literal, rest))
} else {
Err(LexError::NumericLiteralNotProperlyFinished)
}
}
fn lex_maybe_whitespace<'x>(l: U8SpanRef<'x>) -> (&'x [u8], U8SpanRef<'x>) {
use super::ascii_char::is_whitespace_char;
let pos = l
.bytes()
.iter()
.copied()
.take_while(|&ch| is_whitespace_char(ch))
.count();
l.split_at(pos)
}
fn lex_identifier<'x>(l: U8SpanRef<'x>) -> (Option<&'x [u8]>, U8SpanRef<'x>) {
use super::ascii_char::{is_id_continue, is_id_start};
let pos = l
.bytes()
.iter()
.copied()
.enumerate()
.take_while(|&(n, ch)| {
if n == 0 {
is_id_start(ch)
} else {
is_id_continue(ch)
}
})
.count();
if pos == 0 {
(None, l)
} else {
let (head, rest) = l.split_at(pos);
(Some(head), rest)
}
}
fn lex_punct<'x>(l: U8SpanRef<'x>) -> (Option<Punctuation>, U8SpanRef<'x>) {
use super::punctuation::PUNCTUATION_TABLE;
for table_item in PUNCTUATION_TABLE {
if l.starts_with(table_item.literal) {
let literal_len = table_item.literal.len();
let (_, rest) = l.split_at(literal_len);
return (Some(table_item.kind.clone()), rest);
}
}
(None, l)
}
    /// Lexes the remainder of a control code, with `l` positioned just
    /// after the `@` prefix.
    ///
    /// Returns the parsed [`ControlCode`], the remaining input, and whether
    /// this code terminates the current mode (so the caller must switch).
    ///
    /// Errors on EOF after `@`, on an unknown selector byte, or when the
    /// code is not applicable in `mode`.
    fn lex_control_code_rest<'x>(
        l: U8SpanRef<'x>,
        mode: LexMode,
    ) -> Result<(ControlCode<'x>, U8SpanRef<'x>, bool), LexError> {
        use super::control_code::get_control_code_info_record_for_selector;
        use super::control_code::SpecialHandling;
        let selector = l.front_cloned().ok_or_else(|| LexError::UnexpectedEOF)?;
        let control_code_info =
            get_control_code_info_record_for_selector(selector).ok_or_else(|| {
                LexError::InvalidControlCodeChar {
                    control_code: selector,
                    pos: l.pos(),
                }
            })?;
        if !control_code_info.appliable_modes.contains_mode(mode) {
            return Err(LexError::ControlCodeCharInNonApplicableMode(
                selector as char,
            ));
        }
        let is_terminator = control_code_info.terminating_modes.contains_mode(mode);
        // Skip the selector byte; parameter parsing (if any) starts here.
        let rest = l.range(1..);
        let (control_code, rest) = match control_code_info.special_handling {
            // Parameterless codes: just record the kind.
            SpecialHandling::None => {
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: None,
                };
                (control_code, rest)
            }
            // `@*`: title text (inline whitespace skipped) up to `.` or EOL.
            SpecialHandling::GroupTitle => {
                let group_title_start = rest
                    .bytes()
                    .iter()
                    .take_while(|&&ch| is_inline_whitespace_char(ch))
                    .count();
                let group_title_end =
                    memchr::memchr2(b'.', b'\n', rest.bytes()).unwrap_or(rest.len());
                let control_text_end;
                if !rest.range(group_title_end..).starts_with(b".") {
                    // Tolerated deviation: warn instead of erroring out.
                    eprintln!(
                        "WARN: module group title not finished with dot character, continuing."
                    );
                    control_text_end = group_title_end;
                    //return Err(LexError::GroupTitleNotProperlyFinished);
                } else {
                    control_text_end = group_title_end + 1;
                }
                let group_title_text =
                    ascii_str::from_bytes(&rest.bytes()[group_title_start..group_title_end])?;
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: Some(Box::new(vec![Token::TextFragment(group_title_text)])),
                };
                (control_code, rest.range(control_text_end..))
            }
            // `@<`: lex tokens in MODULE_NAME mode until the synthesized
            // `HiddenEndOfModuleName` (produced for `@>`) is seen.
            SpecialHandling::ModuleName => {
                let mode = LexMode::MODULE_NAME;
                let mut data = rest;
                let mut tokens = vec![];
                'module_name_loop: loop {
                    use super::control_code::ControlCodeKind;
                    let (token, control_flow) = lex_token(data, mode)?;
                    match control_flow {
                        LexControlFlow::Continue(rest_data) => {
                            data = rest_data;
                            match token {
                                Token::CtrlCode(ControlCode {
                                    kind: ControlCodeKind::HiddenEndOfModuleName,
                                    ..
                                }) => {
                                    break 'module_name_loop;
                                }
                                _ => {
                                    tokens.push(token);
                                }
                            }
                        }
                        LexControlFlow::Finish(..) => {
                            return Err(LexError::UnexpectedEOF);
                        }
                        LexControlFlow::StartNew(..) => {
                            return Err(LexError::ControlCodeInNonApplicableMode);
                        }
                        LexControlFlow::ModuleNameInlineProgAbort(..) => {
                            return Err(LexError::ControlCodeInNonApplicableMode);
                        }
                    }
                }
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: Some(Box::new(tokens)),
                };
                (control_code, data)
            }
            // `@d` / `@f`: lex the definition body until the next `@d`/`@f`
            // or until a terminating control code / EOF is reached.
            SpecialHandling::FormatDefinition | SpecialHandling::MacroDefinition => {
                let mode = LexMode::DEFINITION_TEXT;
                let mut data = rest;
                let mut tokens = vec![];
                'definition_loop: loop {
                    if data.starts_with(START_OF_MACRO_DEFINITION)
                        || data.starts_with(START_OF_FORMAT_DEFINITION)
                    {
                        break 'definition_loop;
                    }
                    let (token, control_flow) = lex_token(data, mode)?;
                    match control_flow {
                        LexControlFlow::Continue(rest_data) => {
                            data = rest_data;
                            tokens.push(token);
                        }
                        LexControlFlow::Finish(rest_data) => {
                            data = rest_data;
                            tokens.push(token);
                            break 'definition_loop;
                        }
                        LexControlFlow::StartNew(..) => {
                            break 'definition_loop;
                        }
                        LexControlFlow::ModuleNameInlineProgAbort(..) => {
                            return Err(LexError::ControlCodeInNonApplicableMode);
                        }
                    }
                }
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: Some(Box::new(tokens)),
                };
                (control_code, data)
            }
            // `@'`: consume the run of octal digits and parse it.
            SpecialHandling::OctalConst => {
                let octal_digit_count = rest
                    .bytes()
                    .iter()
                    .copied()
                    .take_while(|&ch| is_octal_digit(ch))
                    .count();
                let (octal_digits, rest) = rest.split_at(octal_digit_count);
                let literal = lex_u32_literal_with_radix(octal_digits, 8)?;
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: Some(Box::new(vec![Token::Literal(literal)])),
                };
                (control_code, rest)
            }
            // `@"`: consume the run of hex digits and parse it.
            SpecialHandling::HexConst => {
                let hex_digit_count = rest
                    .bytes()
                    .iter()
                    .copied()
                    .take_while(|&ch| is_hex_digit(ch))
                    .count();
                let (hex_digits, rest) = rest.split_at(hex_digit_count);
                let literal = lex_u32_literal_with_radix(hex_digits, 16)?;
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: Some(Box::new(vec![Token::Literal(literal)])),
                };
                (control_code, rest)
            }
            // Control text up to `@>`, not crossing a line boundary.
            SpecialHandling::ControlTextUpToAtGT => {
                let control_text_len = memchr::memchr3(
                    CONTROL_CODE_PREFIX,
                    LINE_FEED,
                    CARRIAGE_RETURN,
                    rest.bytes(),
                )
                .unwrap_or(rest.len());
                if !rest
                    .range(control_text_len..)
                    .starts_with(END_OF_CONTROL_TEXT)
                {
                    return Err(LexError::ControlTextNotProperlyFinished);
                }
                let control_code = ControlCode {
                    kind: control_code_info.kind,
                    param: Some(Box::new(vec![Token::TextFragment(ascii_str::from_bytes(
                        &rest.bytes()[..control_text_len],
                    )?)])),
                };
                (
                    control_code,
                    rest.range(control_text_len + END_OF_CONTROL_TEXT.len()..),
                )
            }
            // Unsupported-but-harmless codes (e.g. `@z`): warn and drop.
            SpecialHandling::WarnAndIgnore => {
                use super::control_code::ControlCodeKind;
                eprintln!(
                    "WARN: %{} occurred in the web file, ignoring.",
                    selector as char
                );
                let control_code = ControlCode {
                    kind: ControlCodeKind::Ignored,
                    param: None,
                };
                (control_code, rest)
            }
        };
        Ok((control_code, rest, is_terminator))
    }
pub fn lex_comment_rest<'x>(
l: U8SpanRef<'x>,
) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {
let mode = LexMode::COMMENT;
let mut l = l;
let mut tokens = vec![];
let mut level = 1usize;
'comment_loop: loop {
if l.starts_with(b"\\") {
let (head, rest) = l.split_at(2);
if l.len() >= 2 {
let escaped_fragment = Token::TextFragment(ascii_str::from_bytes(head)?);
tokens.push(escaped_fragment);
l = rest;
} else {
return Err(LexError::CommentNotProperlyFinished);
}
} else if l.starts_with(b"{") {
let (head, rest) = l.split_at(1);
let fragment = Token::TextFragment(ascii_str::from_bytes(head)?);
tokens.push(fragment);
level = level
.checked_add(1)
.ok_or(LexError::CommentNestingTooDeep)?;
l = rest;
} else if l.starts_with(b"}") {
let (head, rest) = l.split_at(1);
level -= 1;
if level != 0 {
let fragment = Token::TextFragment(ascii_str::from_bytes(head)?);
tokens.push(fragment);
}
l = rest;
if level == 0 {
break 'comment_loop;
}
} else {
let (token, control_flow) = lex_token(l, mode)?;
match control_flow {
LexControlFlow::Continue(rest_data) => {
l = rest_data;
tokens.push(token);
}
LexControlFlow::Finish(..) => {
return Err(LexError::UnexpectedEOF);
}
LexControlFlow::StartNew(..) => {
return Err(LexError::ControlCodeInNonApplicableMode);
}
LexControlFlow::ModuleNameInlineProgAbort(..) => {
return Err(LexError::ControlCodeInNonApplicableMode);
}
}
}
}
let token = Token::Comment(Box::new(tokens));
Ok((token, continue_or_finish(l)))
}
fn lex_string_literal_rest<'x>(
l: U8SpanRef<'x>,
) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {
// fixme: properly parse string literal
let text_end = memchr::memchr2(b'\'', b'\n', l.bytes()).unwrap_or(l.len());
if !l.range(text_end..).starts_with(b"\'") {
return Err(LexError::StringLiteralNotProperlyFinished);
}
let literal_end = text_end + 1;
let literal_text = ascii_str::from_bytes(&l.bytes()[..text_end])?;
let token = Token::Literal(Literal::StringLiteral(literal_text));
Ok((token, continue_or_finish(l.range(literal_end..))))
}
fn lex_preprocessed_string_literal_rest<'x>(
l: U8SpanRef<'x>,
) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {
// fixme: properly parse string literal
let text_end = memchr::memchr2(b'\"', b'\n', l.bytes()).unwrap_or(l.len());
if !l.range(text_end..).starts_with(b"\"") {
return Err(LexError::PreprocessedStringLiteralNotProperlyFinished);
}
let literal_end = text_end + 1;
let mut tokens = vec![];
tokens.push(Token::TextFragment(ascii_str::from_bytes(
&l.bytes()[..text_end],
)?));
let token = Token::Literal(Literal::PreprocessedStringLiteral(Box::new(tokens)));
Ok((token, continue_or_finish(l.range(literal_end..))))
}
    /// Lexes an inline program fragment, with `l` positioned just after the
    /// opening `|`. Collects tokens until the closing `|` — or, when the
    /// fragment sits inside a module name, until the `...@>` abort sequence.
    fn lex_inline_prog_rest<'x>(
        l: U8SpanRef<'x>,
        parent_mode: LexMode,
    ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {
        let mode = LexMode::INLINE_PASCAL_TEXT;
        let mut data = l;
        let mut tokens = vec![];
        'inline_prog_loop: loop {
            if data.starts_with(b"|") {
                // Closing delimiter: consume it and stop.
                data = data.range(1..);
                break 'inline_prog_loop;
            } else {
                let (token, control_flow) = lex_token(data, mode)?;
                match control_flow {
                    LexControlFlow::Continue(rest_data) => {
                        data = rest_data;
                        tokens.push(token);
                    }
                    // `...@>` is only a valid terminator when this fragment
                    // occurs inside a module name.
                    LexControlFlow::ModuleNameInlineProgAbort(rest_data)
                        if parent_mode == LexMode::MODULE_NAME =>
                    {
                        data = rest_data;
                        tokens.push(token);
                        break 'inline_prog_loop;
                    }
                    _ => {
                        return Err(LexError::InlineProgFragmentNotProperlyFinished);
                    }
                }
            }
        }
        let token = Token::InlineProgramFragment(Box::new(tokens));
        Ok((token, continue_or_finish(data)))
    }
    /// Lexes a single token from `l` under `mode` and tells the caller how
    /// to proceed (continue, finish, switch to a new item, or abort a
    /// module-name inline fragment).
    pub fn lex_token<'x>(
        l: U8SpanRef<'x>,
        mode: LexMode,
    ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {
        let (l_is_empty, first_ch) = match l.front_cloned() {
            Some(ch) => (false, ch),
            None => (true, 0),
        };
        match mode {
            // EOF handling: text modes finish with an empty fragment,
            // program modes with a whitespace token; elsewhere EOF is an error.
            LexMode::LIMBO | LexMode::TEX_TEXT if l_is_empty => {
                let empty = ascii_str::from_bytes(l.bytes())?;
                return Ok((Token::TextFragment(empty), LexControlFlow::Finish(l)));
            }
            LexMode::DEFINITION_TEXT | LexMode::PASCAL_TEXT if l_is_empty => {
                return Ok((Token::WS, LexControlFlow::Finish(l)));
            }
            _ if l_is_empty => {
                return Err(LexError::UnexpectedEOF);
            }
            // `@x` control code in any mode that allows general control codes.
            LexMode::LIMBO
            | LexMode::TEX_TEXT
            | LexMode::PASCAL_TEXT
            | LexMode::INLINE_PASCAL_TEXT
            | LexMode::DEFINITION_TEXT
            | LexMode::COMMENT
                if first_ch == CONTROL_CODE_PREFIX =>
            {
                let rest = l.range(1..);
                let (control_code, rest, is_terminator) = lex_control_code_rest(rest, mode)?;
                if !is_terminator {
                    return Ok((Token::CtrlCode(control_code), continue_or_finish(rest)));
                } else {
                    // A terminating code starts a new item in a new mode.
                    let new_mode = switch_mode(&control_code, rest);
                    return Ok((Token::CtrlCode(control_code), new_mode));
                }
            }
            // Inside a module name only `@@` and the closing `@>` are legal.
            LexMode::MODULE_NAME if first_ch == CONTROL_CODE_PREFIX => {
                use super::control_code::ControlCodeKind;
                if l.starts_with(SIMPLE_ESCAPED_ATAIL) {
                    let control_code = ControlCode {
                        kind: ControlCodeKind::EscapedAt,
                        param: None,
                    };
                    return Ok((
                        Token::CtrlCode(control_code),
                        continue_or_finish(l.range(2..)),
                    ));
                } else if l.starts_with(END_OF_CONTROL_TEXT) {
                    // Synthesize the hidden end-of-module-name marker that
                    // `lex_control_code_rest` watches for.
                    let control_code = ControlCode {
                        kind: ControlCodeKind::HiddenEndOfModuleName,
                        param: None,
                    };
                    return Ok((
                        Token::CtrlCode(control_code),
                        continue_or_finish(l.range(2..)),
                    ));
                } else {
                    return Err(LexError::ControlCodeInNonApplicableMode);
                }
            }
            // `|...|` inline program fragment inside text-ish modes.
            LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::MODULE_NAME | LexMode::COMMENT
                if first_ch == INLINE_PROGRAM_FRAGMENT =>
            {
                let rest = l.range(1..);
                return lex_inline_prog_rest(rest, mode);
            }
            // Plain text run up to the next significant byte for the mode.
            LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::MODULE_NAME | LexMode::COMMENT => {
                use memchr::{memchr, memchr2};
                debug_assert_ne!(first_ch, CONTROL_CODE_PREFIX);
                debug_assert_ne!(first_ch, INLINE_PROGRAM_FRAGMENT);
                let text_len = if mode == LexMode::LIMBO {
                    // Limbo text only ends at a control code.
                    memchr(CONTROL_CODE_PREFIX, l.bytes())
                } else if mode != LexMode::COMMENT {
                    memchr2(CONTROL_CODE_PREFIX, INLINE_PROGRAM_FRAGMENT, l.bytes())
                } else {
                    // Comments additionally break on escapes and braces.
                    let count = l
                        .bytes()
                        .iter()
                        .take_while(|&&ch| {
                            ch != CONTROL_CODE_PREFIX
                                && ch != INLINE_PROGRAM_FRAGMENT
                                && ch != ESCAPE_CHARACTER
                                && ch != START_OF_COMMENT
                                && ch != END_OF_COMMENT
                        })
                        .count();
                    Some(count)
                }
                .unwrap_or_else(|| l.len());
                let (text, rest) = l.split_at(text_len);
                let text = ascii_str::from_bytes(text)?;
                return Ok((Token::TextFragment(text), continue_or_finish(rest)));
            }
            // Pascal program text: dispatch on the first byte's class.
            LexMode::PASCAL_TEXT | LexMode::DEFINITION_TEXT | LexMode::INLINE_PASCAL_TEXT => {
                use super::ascii_char;
                debug_assert!(first_ch != CONTROL_CODE_PREFIX);
                if ascii_char::is_whitespace_char(first_ch) {
                    let (_, rest) = lex_maybe_whitespace(l);
                    return Ok((Token::WS, continue_or_finish(rest)));
                } else if ascii_char::is_id_start(first_ch) {
                    let (id, rest) = lex_identifier(l);
                    let id = id.expect("");
                    return Ok((
                        Token::IdentOrKw(ascii_str::from_bytes(id)?),
                        continue_or_finish(rest),
                    ));
                } else if first_ch == b'{' {
                    let rest = l.range(1..);
                    return lex_comment_rest(rest);
                } else if first_ch == b'\'' {
                    let rest = l.range(1..);
                    return lex_string_literal_rest(rest);
                } else if first_ch == b'\"' {
                    let rest = l.range(1..);
                    return lex_preprocessed_string_literal_rest(rest);
                } else if first_ch == b'#' {
                    let rest = l.range(1..);
                    return Ok((Token::MacroParamMark, continue_or_finish(rest)));
                } else if mode == LexMode::INLINE_PASCAL_TEXT
                    && first_ch == b'.'
                    && l.starts_with(MODULE_NAME_INLINE_PROGFRAG_ABORT)
                {
                    // `...@>` aborts an inline fragment inside a module name.
                    return Ok((
                        Token::ModuleNameInlineProgAbort,
                        LexControlFlow::ModuleNameInlineProgAbort(l),
                    ));
                } else if ascii_char::is_punct_char(first_ch) {
                    let (punct, rest) = lex_punct(l);
                    let punct =
                        punct.ok_or_else(|| LexError::UnrecognizedPunctuation(first_ch as char))?;
                    return Ok((Token::Punct(punct), continue_or_finish(rest)));
                } else if ascii_char::is_numeric_char(first_ch) {
                    let (numeric, rest) = lex_numeric_literal(l)?;
                    return Ok((Token::Literal(numeric), continue_or_finish(rest)));
                } else {
                    unimplemented!("{:?}", first_ch);
                }
            }
            _ => unimplemented!(),
        }
    }
}
/// Raw lexing state: the current lexer mode plus the unconsumed input span.
pub struct LexerRawBuf<'x> {
    mode: LexMode,
    data: U8SpanRef<'x>,
}
/// Buffer holding the tokens lexed from the "limbo" text that precedes the
/// first module of a WEB file.
#[derive(Default)]
pub struct LexerLimboBuf<'x> {
    pub(crate) limbo_tokens: token::TokenList<'x>,
}
/// Buffer holding one fully lexed module, split into its three sections.
pub struct LexerModuleBuf<'x> {
    // Token that opened the module (e.g. starred vs. unstarred marker).
    pub(crate) module_type: token::Token<'x>,
    // TeX commentary section of the module.
    pub(crate) text_in_tex: token::TokenList<'x>,
    // Macro/format definition section of the module.
    pub(crate) definitions: token::TokenList<'x>,
    // Pascal program-text section of the module.
    pub(crate) code_in_pascal: token::TokenList<'x>,
}
/// State machine driving `WEBLexer::refill`. Names encode which buffers are
/// already filled/taken and which one is still being lexed ("dirty").
#[derive(Clone, Copy)]
enum LexerInternalState {
    // Limbo buffer still being lexed.
    LimboDirty,
    // Limbo complete; first module currently being lexed.
    LimboFilledModuleDirty,
    // Limbo complete and input ended before any module started.
    LimboFilledEOF,
    // Limbo handed out to the caller; current module still being lexed.
    LimboTakenModuleDirty,
    // Current module complete; the following module is being lexed.
    ModuleFilledNextModuleDirty,
    // Current module complete; input exhausted.
    ModuleFilledEOF,
    // Everything handed out; nothing left.
    EOF,
}
/// Pull-style lexer for WEB sources: call `lex_limbo` once, then
/// `lex_module` repeatedly until it returns `None`.
pub struct WEBLexer<'x> {
    raw_buf: LexerRawBuf<'x>,
    state: LexerInternalState,
    // Buffers are `Option` so they can be moved out to the caller.
    limbo_buf: Option<LexerLimboBuf<'x>>,
    module_buf: Option<LexerModuleBuf<'x>>,
    next_module_buf: Option<LexerModuleBuf<'x>>,
}
impl<'x> WEBLexer<'x> {
    /// Creates a lexer over the raw WEB source `data`, starting in limbo
    /// mode (the text before the first module marker).
    pub fn new(data: &'x [u8]) -> Self {
        let raw_buf = LexerRawBuf {
            mode: LexMode::LIMBO,
            data: U8SpanRef::new(data),
        };
        let limbo_buf = Some(Default::default());
        let state = LexerInternalState::LimboDirty;
        let module_buf = None;
        let next_module_buf = None;
        WEBLexer {
            raw_buf,
            state,
            limbo_buf,
            module_buf,
            next_module_buf,
        }
    }
    /// Lexes tokens until the currently "dirty" buffer (limbo or a module)
    /// is complete, i.e. until EOF or the start of the next module.
    /// A no-op once the state says there is nothing left to lex.
    fn refill(&mut self) -> Result<(), LexError> {
        // Select which module buffer (if any) receives the lexed tokens.
        let mut output_module;
        match self.state {
            LexerInternalState::LimboDirty => {
                output_module = None;
            }
            LexerInternalState::LimboTakenModuleDirty
            | LexerInternalState::LimboFilledModuleDirty => {
                output_module = Some(self.module_buf.as_mut().unwrap());
            }
            LexerInternalState::ModuleFilledNextModuleDirty => {
                output_module = Some(self.next_module_buf.as_mut().unwrap());
            }
            LexerInternalState::LimboFilledEOF
            | LexerInternalState::ModuleFilledEOF
            | LexerInternalState::EOF => {
                // Buffers already filled or input exhausted: nothing to do.
                return Ok(());
            }
        }
        // A section-opening token that must be pushed into the token list of
        // the section it just opened (set when the lex mode switches).
        let mut pending_token = None;
        'outer: loop {
            // Pick the token list that matches the current lex mode.
            let output_tokenlist;
            if let Some(module) = &mut output_module {
                output_tokenlist = match self.raw_buf.mode {
                    LexMode::TEX_TEXT => &mut module.text_in_tex,
                    LexMode::DEFINITION_TEXT => &mut module.definitions,
                    LexMode::PASCAL_TEXT => &mut module.code_in_pascal,
                    _ => unreachable!(),
                };
            } else {
                assert!(self.raw_buf.mode == LexMode::LIMBO);
                output_tokenlist = &mut self.limbo_buf.as_mut().unwrap().limbo_tokens;
            }
            if let Some(token) = pending_token.take() {
                output_tokenlist.push(token);
            }
            'inner: loop {
                let (token, control_flow) = token::lex_token(self.raw_buf.data, self.raw_buf.mode)?;
                match control_flow {
                    // Same section continues: keep appending tokens.
                    LexControlFlow::Continue(rest_data) => {
                        output_tokenlist.push(token);
                        self.raw_buf.data = rest_data;
                        continue 'inner;
                    }
                    // End of input: record EOF in the state machine.
                    LexControlFlow::Finish(rest_data) => {
                        output_tokenlist.push(token);
                        self.raw_buf.data = rest_data;
                        self.state = match self.state {
                            LexerInternalState::LimboDirty => LexerInternalState::LimboFilledEOF,
                            LexerInternalState::LimboTakenModuleDirty => {
                                LexerInternalState::ModuleFilledEOF
                            }
                            LexerInternalState::LimboFilledModuleDirty
                            | LexerInternalState::ModuleFilledNextModuleDirty
                            | LexerInternalState::LimboFilledEOF
                            | LexerInternalState::ModuleFilledEOF
                            | LexerInternalState::EOF => unreachable!(),
                        };
                        break 'outer;
                    }
                    // A new module starts: the dirty buffer is now complete.
                    LexControlFlow::StartNew(
                        LexControlFlowNewItem::Module,
                        new_mode,
                        rest_data,
                    ) => {
                        self.raw_buf.mode = new_mode;
                        self.raw_buf.data = rest_data;
                        let new_module = LexerModuleBuf {
                            module_type: token,
                            text_in_tex: Default::default(),
                            definitions: Default::default(),
                            code_in_pascal: Default::default(),
                        };
                        self.state = match self.state {
                            LexerInternalState::LimboDirty => {
                                assert!(self.module_buf.is_none());
                                self.module_buf = Some(new_module);
                                LexerInternalState::LimboFilledModuleDirty
                            }
                            LexerInternalState::LimboTakenModuleDirty => {
                                assert!(self.next_module_buf.is_none());
                                self.next_module_buf = Some(new_module);
                                LexerInternalState::ModuleFilledNextModuleDirty
                            }
                            LexerInternalState::LimboFilledModuleDirty
                            | LexerInternalState::ModuleFilledNextModuleDirty
                            | LexerInternalState::LimboFilledEOF
                            | LexerInternalState::ModuleFilledEOF
                            | LexerInternalState::EOF => unreachable!(),
                        };
                        break 'outer;
                    }
                    // Same module, new section: switch lex mode and carry the
                    // opening token over into the new section's token list.
                    LexControlFlow::StartNew(
                        LexControlFlowNewItem::Definition,
                        new_mode,
                        rest_data,
                    )
                    | LexControlFlow::StartNew(
                        LexControlFlowNewItem::ProgramText,
                        new_mode,
                        rest_data,
                    ) => {
                        assert!(pending_token.is_none());
                        pending_token = Some(token);
                        self.raw_buf.mode = new_mode;
                        self.raw_buf.data = rest_data;
                        continue 'outer;
                    }
                    LexControlFlow::ModuleNameInlineProgAbort(..) => {
                        unreachable!();
                    }
                }
            }
        }
        Ok(())
    }
    /// Returns the limbo tokens, lexing them first if necessary.
    /// Yields `Some` once; subsequent calls return `None`.
    /// Must be called before `lex_module`.
    pub fn lex_limbo(&mut self) -> Result<Option<LexerLimboBuf<'x>>, LexError> {
        self.refill()?;
        let result;
        self.state = match self.state {
            LexerInternalState::LimboDirty | LexerInternalState::LimboTakenModuleDirty => unreachable!(),
            LexerInternalState::LimboFilledModuleDirty => {
                result = self.limbo_buf.take();
                LexerInternalState::LimboTakenModuleDirty
            }
            LexerInternalState::LimboFilledEOF => {
                result = self.limbo_buf.take();
                LexerInternalState::EOF
            }
            LexerInternalState::ModuleFilledNextModuleDirty
            | LexerInternalState::ModuleFilledEOF
            | LexerInternalState::EOF => {
                result = None;
                self.state
            }
        };
        Ok(result)
    }
    /// Returns the next fully lexed module, or `None` once exhausted.
    /// `lex_limbo` must have been called (and its buffer taken) first.
    pub fn lex_module(&mut self) -> Result<Option<LexerModuleBuf<'x>>, LexError> {
        self.refill()?;
        let result;
        self.state = match self.state {
            LexerInternalState::LimboDirty | LexerInternalState::LimboTakenModuleDirty => unreachable!(),
            LexerInternalState::LimboFilledModuleDirty | LexerInternalState::LimboFilledEOF => {
                // must be called in the wrong order.
                unreachable!();
            }
            LexerInternalState::ModuleFilledNextModuleDirty => {
                use std::mem::swap;
                // Hand out the finished module and promote the next one.
                result = self.module_buf.take();
                swap(&mut self.module_buf, &mut self.next_module_buf);
                LexerInternalState::LimboTakenModuleDirty
            }
            LexerInternalState::ModuleFilledEOF => {
                result = self.module_buf.take();
                LexerInternalState::EOF
            }
            LexerInternalState::EOF => {
                result = None;
                self.state
            }
        };
        Ok(result)
    }
}
| true |
24ec11896677f646b439567f03f5daa166f81c60
|
Rust
|
unovor/frame
|
/asn1_der-0.6.3/src/types/boolean.rs
|
UTF-8
| 702 | 2.671875 | 3 |
[
"BSD-2-Clause",
"MIT"
] |
permissive
|
use ::{ Asn1DerError, types::{ FromDerObject, IntoDerObject }, der::{ DerObject, DerTag} };
impl FromDerObject for bool {
	/// Decodes a DER `BOOLEAN`: the payload must be exactly one byte,
	/// `0x00` for `false` or `0xff` for `true` (DER's canonical encodings).
	fn from_der_object(der_object: DerObject) -> Result<Self, Asn1DerError> {
		if der_object.tag != DerTag::Boolean { return Err(Asn1DerError::InvalidTag) }
		let payload = der_object.value.data.as_slice();
		if payload == [0x00u8] {
			Ok(false)
		} else if payload == [0xffu8] {
			Ok(true)
		} else {
			Err(Asn1DerError::InvalidEncoding)
		}
	}
}
impl IntoDerObject for bool {
	/// Encodes a `bool` as a DER `BOOLEAN` (`0xff` = true, `0x00` = false).
	fn into_der_object(self) -> DerObject {
		let payload = if self { vec![0xffu8] } else { vec![0x00u8] };
		DerObject::new(DerTag::Boolean, payload.into())
	}
	/// A boolean payload is always a single byte.
	fn serialized_len(&self) -> usize {
		DerObject::compute_serialized_len(1)
	}
}
| true |
88d58e93555a8aad6ff888974be5d5196671451d
|
Rust
|
esavier/keynesis
|
/src/passport/block/content.rs
|
UTF-8
| 7,809 | 2.828125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::passport::block::{EntryError, EntrySlice, EntryType, Hash, Hasher};
use std::{
convert::TryInto as _,
fmt::{self, Formatter},
iter::FusedIterator,
ops::Deref,
};
use thiserror::Error;
/// Owned, serialized block content: a byte buffer holding zero or more
/// consecutive entries.
#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Hash)]
pub struct Content(Box<[u8]>);
/// Mutable builder view used to append entries into a backing `Vec<u8>`.
#[derive(Ord, PartialOrd, Eq, PartialEq)]
pub(crate) struct ContentMut<'a>(&'a mut Vec<u8>);
/// Borrowed, immutable view over serialized content bytes.
#[derive(Ord, PartialOrd, Eq, PartialEq, Copy, Clone, Hash)]
pub struct ContentSlice<'a>(&'a [u8]);
/// Errors that can occur while building or decoding a `Content`.
#[derive(Debug, Error)]
pub enum ContentError {
    #[error("Content's max size has been reached, cannot add the entry")]
    MaxSizeReached,
    #[error("The content has {extra} bytes we do not know what they are for, it could the buffer was truncated")]
    InvalidLength { extra: usize },
    #[error("Invalid entry")]
    Entry(
        #[from]
        #[source]
        EntryError,
    ),
}
/// Iterator yielding each `EntrySlice` of a content, front to back.
pub struct ContentSliceIter<'a>(&'a [u8]);
impl Content {
    /// Maximum serialized size of a content, in bytes (fits in a `u16`).
    pub const MAX_SIZE: usize = u16::MAX as usize;
    /// Borrowed view over the serialized bytes.
    pub fn as_slice(&self) -> ContentSlice<'_> {
        ContentSlice(&self.0)
    }
    /// Iterator over the entries stored in this content.
    pub fn iter(&self) -> ContentSliceIter<'_> {
        self.as_slice().iter()
    }
    /// Hash of the raw serialized bytes.
    pub fn hash(&self) -> Hash {
        self.as_slice().hash()
    }
}
impl<'a> ContentMut<'a> {
    /// Wraps `bytes` as a content builder; entries are appended to it.
    pub(crate) fn new(bytes: &'a mut Vec<u8>) -> Self {
        Self(bytes)
    }
    #[cfg(test)]
    fn into_content(self) -> Content {
        Content(self.0.to_owned().into_boxed_slice())
    }
    /// Appends one serialized entry, rejecting it if the total size would
    /// exceed `Content::MAX_SIZE`.
    pub(crate) fn push(&mut self, entry: EntrySlice<'_>) -> Result<(), ContentError> {
        let current_size = self.0.len();
        let needed_size = current_size + entry.as_ref().len();
        if needed_size > Content::MAX_SIZE {
            return Err(ContentError::MaxSizeReached);
        }
        self.0.extend_from_slice(entry.as_ref());
        Ok(())
    }
}
impl<'a> ContentSlice<'a> {
    /// Iterator over the entries in this slice.
    pub fn iter(&self) -> ContentSliceIter<'a> {
        ContentSliceIter(self.0)
    }
    /// Copies the bytes into an owned `Content`.
    pub fn to_content(&self) -> Content {
        Content(self.0.to_vec().into_boxed_slice())
    }
    /// Wraps `slice` without validating the entries; the caller must
    /// guarantee it is well-formed (see `try_from_slice`).
    pub fn from_slice_unchecked(slice: &'a [u8]) -> Self {
        Self(slice)
    }
    /// Validates `slice` as serialized content: checks the size bound, then
    /// walks and parses every entry; any trailing bytes are an error.
    pub fn try_from_slice(slice: &'a [u8]) -> Result<Self, ContentError> {
        if slice.len() > Content::MAX_SIZE {
            return Err(ContentError::MaxSizeReached);
        }
        let content = Self(slice);
        let mut slice = content.0;
        // Each entry starts with a 2-byte big-endian entry type, from which
        // the full entry size can be derived.
        while slice.len() >= 2 {
            let entry_type =
                EntryType::try_from_u16(u16::from_be_bytes(slice[..2].try_into().unwrap()))?;
            let size = entry_type.size(&slice[2..]);
            let _ = EntrySlice::try_from_slice(&slice[..size])?;
            slice = &slice[size..];
        }
        if slice.is_empty() {
            Ok(content)
        } else {
            // Leftover bytes that do not form a complete entry.
            Err(ContentError::InvalidLength { extra: slice.len() })
        }
    }
    /// Hash of the raw bytes of this slice.
    pub fn hash(&self) -> Hash {
        Hasher::hash(self.0)
    }
}
impl<'a> IntoIterator for ContentSlice<'a> {
    type IntoIter = ContentSliceIter<'a>;
    type Item = EntrySlice<'a>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a> Iterator for ContentSliceIter<'a> {
    type Item = EntrySlice<'a>;
    fn next(&mut self) -> Option<Self::Item> {
        if self.0.is_empty() {
            None
        } else {
            // Assumes the bytes hold well-formed entries (as checked by
            // `ContentSlice::try_from_slice`), hence the unwraps.
            let entry_type =
                EntryType::try_from_u16(u16::from_be_bytes(self.0[..2].try_into().unwrap()))
                    .unwrap();
            let size = entry_type.size(&self.0[2..]);
            let entry = EntrySlice::from_slice_unchecked(&self.0[..size]);
            self.0 = &self.0[size..];
            Some(entry)
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Entry sizes vary, so only a lower bound of 1 is known when
        // bytes remain.
        if self.0.is_empty() {
            (0, Some(0))
        } else {
            (1, None)
        }
    }
}
// Once `next` returns `None` the slice stays empty, so the iterator is fused.
impl<'a> FusedIterator for ContentSliceIter<'a> {}
// Debug renders the parsed entry list rather than raw bytes.
impl<'a> fmt::Debug for ContentSlice<'a> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl fmt::Debug for Content {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<'a> AsRef<[u8]> for ContentSlice<'a> {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
// All three content types deref to the raw serialized bytes.
impl<'a> Deref for ContentMut<'a> {
    type Target = [u8];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<'a> Deref for ContentSlice<'a> {
    type Target = [u8];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Deref for Content {
    type Target = [u8];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        key::{curve25519, ed25519::PublicKey},
        passport::block::{Entry, EntryMut},
        Seed,
    };
    use quickcheck::{Arbitrary, Gen};
    // Generates a Content holding up to 11 random entries, stopping early
    // if the size cap is hit.
    impl Arbitrary for Content {
        fn arbitrary(g: &mut Gen) -> Self {
            let max = usize::arbitrary(g) % 12;
            let mut bytes = Vec::with_capacity(1024);
            let mut content = ContentMut::new(&mut bytes);
            for _ in 0..max {
                let entry = Entry::arbitrary(g);
                match content.push(entry.as_slice()) {
                    Ok(()) => (),
                    Err(ContentError::MaxSizeReached) => break,
                    Err(error) => {
                        // another error occurred, it should not happen but
                        // better ready than sorry
                        unreachable!(&error)
                    }
                }
            }
            content.into_content()
        }
    }
    /// test to make sure we detect the limit of the Content
    /// when using `push`
    #[test]
    fn too_long_fail() {
        let content = [0; Content::MAX_SIZE + 1];
        match ContentSlice::try_from_slice(&content) {
            Err(ContentError::MaxSizeReached) => (),
            Err(error) => panic!("Didn't expect this error: {:?}", error),
            Ok(_) => panic!("Content should have failed with too long error"),
        }
    }
    // Builds a content consisting only of shared-key entries and checks it
    // can be finalized without error.
    #[test]
    fn test_shared_entry_only() {
        let mut rng = quickcheck::Gen::new(1024);
        let max = 1;
        let mut bytes = Vec::with_capacity(1024);
        let mut content = ContentMut::new(&mut bytes);
        for _ in 0..max {
            let mut entry_bytes = Vec::with_capacity(1024);
            let key = curve25519::SecretKey::arbitrary(&mut rng);
            let mut builder = EntryMut::new_set_shared_key(&mut entry_bytes, &key.public_key());
            let passphrase = Option::<Seed>::arbitrary(&mut rng);
            let mut entry_rng = Seed::arbitrary(&mut rng).into_rand_chacha();
            let count = u8::arbitrary(&mut rng) % 12 + 1;
            for _ in 0..count {
                builder
                    .share_with(
                        &mut entry_rng,
                        &key,
                        &PublicKey::arbitrary(&mut rng),
                        &passphrase,
                    )
                    .expect("valid share to this key");
            }
            let entry = builder.finalize().expect("valid key sharing entry");
            match content.push(entry) {
                Ok(()) => (),
                Err(ContentError::MaxSizeReached) => break,
                Err(error) => {
                    // another error occurred, it should not happen but
                    // better ready than sorry
                    unreachable!(&error)
                }
            }
        }
        let _ = content.into_content();
    }
    // Round-trip: any generated Content must re-validate as a slice.
    #[quickcheck]
    fn decode_slice(content: Content) -> bool {
        ContentSlice::try_from_slice(&content.0).unwrap();
        true
    }
}
| true |
fe2b27935168a910385475d8c99f7a74ebb18257
|
Rust
|
kyle-rader/advent_of_code
|
/src/rust/aoc/src/auth.rs
|
UTF-8
| 2,264 | 2.875 | 3 |
[] |
no_license
|
use std::fs::{self, File};
use std::io::Write;
use std::path::PathBuf;
use crate::aoc_client::AocClient;
use crate::cookies::aoc_session_token_first;
use anyhow::anyhow;
use directories::ProjectDirs;
/// Obtains a session token (CLI argument or FireFox cookies), verifies it
/// against the Advent of Code site, and caches it for later commands.
pub fn login(token: Option<String>) -> anyhow::Result<()> {
    // Get: prefer an explicitly supplied token over the cookie store.
    let token = if let Some(token) = token {
        println!("📝 Using token provided on CLI");
        token
    } else {
        println!("🍪 Using token from FireFox cookies");
        aoc_session_token_first()?
    };
    // Test: an authenticated request proves the token is valid.
    let client = AocClient::new(&token);
    let user_name = client.user_name()?;
    println!("✅ Token works!");
    // Save: persist the verified token to the cache file.
    let cache_file = save_token(&token)?;
    println!("💾 Token saved at {}", &cache_file.display());
    println!("🚀 Welcome, {user_name}! Happy solving 🎉");
    Ok(())
}
/// Removes the cached session token, if one exists.
pub fn logout() -> anyhow::Result<()> {
    let path = cache_file()?;
    if !path.exists() {
        println!("🔎 no token cache found");
    } else {
        fs::remove_file(path)?;
        println!("🗑️ token cache removed");
    }
    Ok(())
}
/// Reads the cached session token, attempting an automatic login first if
/// no cache file exists yet.
pub fn get_token() -> anyhow::Result<String> {
    let path = cache_file()?;
    if !path.exists() {
        println!("⚠️ Attempting to auto login");
        login(None)?
    }
    fs::read_to_string(&path).map_err(|err| {
        anyhow!("❌ {err}\nUnable to read token. (Make sure you have run the `login` command)")
    })
}
fn save_token(token: &String) -> anyhow::Result<PathBuf> {
let cache_file = cache_file()?;
let mut file = File::create(&cache_file)?;
file.write_all(token.as_bytes())?;
Ok(cache_file)
}
/// Returns the path of the token cache file, creating its parent cache
/// directory if it does not exist yet.
fn cache_file() -> anyhow::Result<PathBuf> {
    let project_dir = ProjectDirs::from("com", "advent_of_code", "aoc_cli")
        .ok_or_else(|| anyhow!("Could not get project directory"))?;
    let cache_dir = project_dir.cache_dir();
    if !cache_dir.exists() {
        fs::create_dir_all(cache_dir)?;
    }
    Ok(cache_dir.join("aoc.cache"))
}
// todo: function to retrieve cached token
// todo: logout
| true |
e84f63686921b6e2c38786095c792e852a256097
|
Rust
|
nparthas/project_euler
|
/src/q1_50/q16.rs
|
UTF-8
| 458 | 2.953125 | 3 |
[] |
no_license
|
extern crate num;
extern crate num_bigint;
use self::num_bigint::{BigInt, ToBigInt};
use std::ops::Mul;
/// Project Euler 16: sum of the decimal digits of 2^1000.
///
/// Computes the power with a small stdlib-only big-number routine (a vector
/// of decimal digits, least-significant first, doubled 1000 times) instead
/// of pulling in `num_bigint`.
pub fn q16() -> i64 {
    // Start from 1 and double 1000 times -> 2^1000.
    let mut digits: Vec<u8> = vec![1];
    for _ in 0..1000 {
        let mut carry = 0u8;
        for d in digits.iter_mut() {
            // Max intermediate value is 9*2 + 1 = 19, well within u8.
            let doubled = *d * 2 + carry;
            *d = doubled % 10;
            carry = doubled / 10;
        }
        if carry > 0 {
            digits.push(carry);
        }
    }
    digits.iter().map(|&d| d as i64).sum()
}
| true |
da72f242775804c301adbff5e55ce4d923f6a425
|
Rust
|
adjivas/vt100-rust
|
/tests/csi.rs
|
UTF-8
| 13,668 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
extern crate vt100;
mod support;
use support::TestHelpers;
/// CUP (`CSI H`), VPA (`CSI d`), and CHA (`CSI G`) absolute cursor moves;
/// out-of-range coordinates clamp to the screen edge, and 0/1 both mean
/// the first row/column.
#[test]
fn absolute_movement() {
    let mut screen = vt100::Screen::new(24, 80);
    assert_eq!(screen.cursor_position(), (0, 0));
    screen.assert_process(b"\x1b[10;10H");
    assert_eq!(screen.cursor_position(), (9, 9));
    screen.assert_process(b"\x1b[d");
    assert_eq!(screen.cursor_position(), (0, 9));
    screen.assert_process(b"\x1b[15d");
    assert_eq!(screen.cursor_position(), (14, 9));
    screen.assert_process(b"\x1b[H");
    assert_eq!(screen.cursor_position(), (0, 0));
    screen.assert_process(b"\x1b[8H");
    assert_eq!(screen.cursor_position(), (7, 0));
    screen.assert_process(b"\x1b[15G");
    assert_eq!(screen.cursor_position(), (7, 14));
    screen.assert_process(b"\x1b[G");
    assert_eq!(screen.cursor_position(), (7, 0));
    screen.assert_process(b"\x1b[0;0H");
    assert_eq!(screen.cursor_position(), (0, 0));
    screen.assert_process(b"\x1b[1;1H");
    assert_eq!(screen.cursor_position(), (0, 0));
    screen.assert_process(b"\x1b[500;500H");
    assert_eq!(screen.cursor_position(), (23, 79));
}
/// CUF/CUB/CUD/CUU (`CSI C/D/B/A`) relative cursor moves; counts clamp at
/// the screen edges instead of wrapping.
#[test]
fn relative_movement() {
    let mut screen = vt100::Screen::new(24, 80);
    assert_eq!(screen.cursor_position(), (0, 0));
    screen.assert_process(b"\x1b[C");
    assert_eq!(screen.cursor_position(), (0, 1));
    screen.assert_process(b"\x1b[C");
    assert_eq!(screen.cursor_position(), (0, 2));
    screen.assert_process(b"\x1b[20C");
    assert_eq!(screen.cursor_position(), (0, 22));
    screen.assert_process(b"\x1b[D");
    assert_eq!(screen.cursor_position(), (0, 21));
    screen.assert_process(b"\x1b[D");
    assert_eq!(screen.cursor_position(), (0, 20));
    screen.assert_process(b"\x1b[9D");
    assert_eq!(screen.cursor_position(), (0, 11));
    screen.assert_process(b"\x1b[500C");
    assert_eq!(screen.cursor_position(), (0, 79));
    screen.assert_process(b"\x1b[500D");
    assert_eq!(screen.cursor_position(), (0, 0));
    screen.assert_process(b"\x1b[B");
    assert_eq!(screen.cursor_position(), (1, 0));
    screen.assert_process(b"\x1b[B");
    assert_eq!(screen.cursor_position(), (2, 0));
    screen.assert_process(b"\x1b[20B");
    assert_eq!(screen.cursor_position(), (22, 0));
    screen.assert_process(b"\x1b[A");
    assert_eq!(screen.cursor_position(), (21, 0));
    screen.assert_process(b"\x1b[A");
    assert_eq!(screen.cursor_position(), (20, 0));
    screen.assert_process(b"\x1b[9A");
    assert_eq!(screen.cursor_position(), (11, 0));
    screen.assert_process(b"\x1b[500B");
    assert_eq!(screen.cursor_position(), (23, 0));
    screen.assert_process(b"\x1b[500A");
    assert_eq!(screen.cursor_position(), (0, 0));
}
/// ED (`CSI J`, erase in display): 0 = cursor to end, 1 = start to cursor,
/// 2 = whole screen; the `?`-prefixed (DECSED) forms behave the same here.
#[test]
fn ed() {
    let mut screen = vt100::Screen::new(24, 80);
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         baz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[10;12H\x1b[0J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[5;7H\x1b[1J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n       r\n\n\n\n\n         ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[7;7H\x1b[2J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[2J\x1b[H");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         baz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[10;12H\x1b[J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[2J\x1b[H");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         baz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[10;12H\x1b[?0J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[5;7H\x1b[?1J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n       r\n\n\n\n\n         ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[7;7H\x1b[?2J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[2J\x1b[H");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         baz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[10;12H\x1b[?J");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
}
/// EL (`CSI K`, erase in line): 0 = cursor to end of line, 1 = start of
/// line to cursor, 2 = whole line; `?`-prefixed forms behave identically.
#[test]
fn el() {
    let mut screen = vt100::Screen::new(24, 80);
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"foo\x1b[5;5Hbarbar\x1b[10;10Hbazbaz\x1b[20;20Hquux");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    barbar\n\n\n\n\n         bazbaz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[5;8H\x1b[0K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         bazbaz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[10;13H\x1b[1K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n             baz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[20;22H\x1b[2K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n             baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[1;2H\x1b[K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "f\n\n\n\n    bar\n\n\n\n\n             baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[2J\x1b[H");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"foo\x1b[5;5Hbarbar\x1b[10;10Hbazbaz\x1b[20;20Hquux");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    barbar\n\n\n\n\n         bazbaz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[5;8H\x1b[?0K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n         bazbaz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[10;13H\x1b[?1K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n             baz\n\n\n\n\n\n\n\n\n\n                   quux\n\n\n\n\n");
    screen.assert_process(b"\x1b[20;22H\x1b[?2K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n    bar\n\n\n\n\n             baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
    screen.assert_process(b"\x1b[1;2H\x1b[?K");
    assert_eq!(screen.window_contents(0, 0, 23, 79), "f\n\n\n\n    bar\n\n\n\n\n             baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
}
#[test]
fn ich_dch_ech() {
let mut screen = vt100::Screen::new(24, 80);
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
screen.assert_process(b"\x1b[10;10Hfoobar");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
screen.assert_process(b"\x1b[10;12H\x1b[3@");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fo obar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 11));
screen.assert_process(b"\x1b[4P");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 11));
screen.assert_process(b"\x1b[100@");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fo\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 11));
screen.assert_process(b"obar");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 15));
screen.assert_process(b"\x1b[10;12H\x1b[100P");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fo\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 11));
screen.assert_process(b"obar");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 15));
screen.assert_process(b"\x1b[10;13H\x1b[X");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foo ar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 12));
screen.assert_process(b"\x1b[10;11H\x1b[4X");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n f r\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 10));
screen.assert_process(b"\x1b[10;11H\x1b[400X");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n f\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 10));
}
#[test]
fn il_dl() {
let mut screen = vt100::Screen::new(24, 80);
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
screen.assert_process(b"\x1b[10;10Hfoobar\x1b[3D");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 12));
screen.assert_process(b"\x1b[L");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 12));
screen.assert_process(b"\x1b[3L");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 12));
screen.assert_process(b"\x1b[500L");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (9, 12));
screen.assert_process(b"\x1b[10;10Hfoobar\x1b[3D\x1b[6A");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (3, 12));
screen.assert_process(b"\x1b[M");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (3, 12));
screen.assert_process(b"\x1b[3M");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (3, 12));
screen.assert_process(b"\x1b[500M");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (3, 12));
}
#[test]
fn scroll() {
let mut screen = vt100::Screen::new(24, 80);
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
screen.assert_process(b"1\r\n2\r\n3\r\n4\r\n5\r\n6\r\n7\r\n8\r\n9\r\n10\r\n11\r\n12\r\n13\r\n14\r\n15\r\n16\r\n17\r\n18\r\n19\r\n20\r\n21\r\n22\r\n23\r\n24");
assert_eq!(screen.window_contents(0, 0, 23, 79), "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n");
screen.assert_process(b"\x1b[15;15H");
assert_eq!(screen.cursor_position(), (14, 14));
screen.assert_process(b"\x1b[S");
assert_eq!(screen.window_contents(0, 0, 23, 79), "2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n");
assert_eq!(screen.cursor_position(), (14, 14));
screen.assert_process(b"\x1b[3S");
assert_eq!(screen.window_contents(0, 0, 23, 79), "5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n\n\n\n");
assert_eq!(screen.cursor_position(), (14, 14));
screen.assert_process(b"\x1b[T");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n\n\n");
assert_eq!(screen.cursor_position(), (14, 14));
screen.assert_process(b"\x1b[5T");
assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n");
assert_eq!(screen.cursor_position(), (14, 14));
}
| true |
9aa78b2edba994bb6f3f7e5fdf7b37e563bdb3d7
|
Rust
|
dermesser/leveldb-rs
|
/src/asyncdb.rs
|
UTF-8
| 9,918 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::hash_map::HashMap;
use std::path::Path;
use crate::{Options, Result, Status, StatusCode, WriteBatch, DB};
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use tokio::task::{spawn_blocking, JoinHandle};
const CHANNEL_BUFFER_SIZE: usize = 32;
#[derive(Clone, Copy)]
pub struct SnapshotRef(usize);
/// A request sent to the database thread.
enum Request {
Close,
Put { key: Vec<u8>, val: Vec<u8> },
Delete { key: Vec<u8> },
Write { batch: WriteBatch, sync: bool },
Flush,
GetAt { snapshot: SnapshotRef, key: Vec<u8> },
Get { key: Vec<u8> },
GetSnapshot,
DropSnapshot { snapshot: SnapshotRef },
CompactRange { from: Vec<u8>, to: Vec<u8> },
}
/// A response received from the database thread.
enum Response {
OK,
Error(Status),
Value(Option<Vec<u8>>),
Snapshot(SnapshotRef),
}
/// Contains both a request and a back-channel for the reply.
struct Message {
req: Request,
resp_channel: oneshot::Sender<Response>,
}
/// `AsyncDB` makes it easy to use LevelDB in a tokio runtime.
/// The methods follow very closely the main API (see `DB` type). Iteration is not yet implemented.
///
/// TODO: Make it work in other runtimes as well. This is a matter of adapting the blocking thread
/// mechanism as well as the channel types.
pub struct AsyncDB {
jh: JoinHandle<()>,
send: mpsc::Sender<Message>,
}
impl AsyncDB {
/// Create a new or open an existing database.
pub fn new<P: AsRef<Path>>(name: P, opts: Options) -> Result<AsyncDB> {
let db = DB::open(name, opts)?;
let (send, recv) = mpsc::channel(CHANNEL_BUFFER_SIZE);
let jh = spawn_blocking(move || AsyncDB::run_server(db, recv));
Ok(AsyncDB { jh, send })
}
pub async fn close(&self) -> Result<()> {
let r = self.process_request(Request::Close).await?;
match r {
Response::OK => Ok(()),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn put(&self, key: Vec<u8>, val: Vec<u8>) -> Result<()> {
let r = self.process_request(Request::Put { key, val }).await?;
match r {
Response::OK => Ok(()),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn delete(&self, key: Vec<u8>) -> Result<()> {
let r = self.process_request(Request::Delete { key }).await?;
match r {
Response::OK => Ok(()),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn write(&self, batch: WriteBatch, sync: bool) -> Result<()> {
let r = self.process_request(Request::Write { batch, sync }).await?;
match r {
Response::OK => Ok(()),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn flush(&self) -> Result<()> {
let r = self.process_request(Request::Flush).await?;
match r {
Response::OK => Ok(()),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn get(&self, key: Vec<u8>) -> Result<Option<Vec<u8>>> {
let r = self.process_request(Request::Get { key }).await?;
match r {
Response::Value(v) => Ok(v),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn get_at(&self, snapshot: SnapshotRef, key: Vec<u8>) -> Result<Option<Vec<u8>>> {
let r = self
.process_request(Request::GetAt { snapshot, key })
.await?;
match r {
Response::Value(v) => Ok(v),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn get_snapshot(&self) -> Result<SnapshotRef> {
let r = self.process_request(Request::GetSnapshot).await?;
match r {
Response::Snapshot(sr) => Ok(sr),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
/// As snapshots returned by `AsyncDB::get_snapshot()` are sort-of "weak references" to an
/// actual snapshot, they need to be dropped explicitly.
pub async fn drop_snapshot(&self, snapshot: SnapshotRef) -> Result<()> {
let r = self
.process_request(Request::DropSnapshot { snapshot })
.await?;
match r {
Response::OK => Ok(()),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
pub async fn compact_range(&self, from: Vec<u8>, to: Vec<u8>) -> Result<()> {
let r = self
.process_request(Request::CompactRange { from, to })
.await?;
match r {
Response::OK => Ok(()),
Response::Error(s) => Err(s),
_ => Err(Status {
code: StatusCode::AsyncError,
err: "Wrong response type in AsyncDB.".to_string(),
}),
}
}
async fn process_request(&self, req: Request) -> Result<Response> {
let (tx, rx) = oneshot::channel();
let m = Message {
req,
resp_channel: tx,
};
if let Err(e) = self.send.send(m).await {
return Err(Status {
code: StatusCode::AsyncError,
err: e.to_string(),
});
}
let resp = rx.await;
match resp {
Err(e) => Err(Status {
code: StatusCode::AsyncError,
err: e.to_string(),
}),
Ok(r) => Ok(r),
}
}
    // Request loop that owns the `DB` and serves all `AsyncDB` clients.
    // Uses `blocking_recv`, so it is presumably run on a dedicated thread
    // rather than inside the async executor — confirm at the spawn site.
    fn run_server(mut db: DB, mut recv: mpsc::Receiver<Message>) {
        // Server-side snapshot storage: clients only ever hold a numeric
        // `SnapshotRef`; the real snapshot lives in this map.
        let mut snapshots = HashMap::new();
        // Monotonically increasing id; ids are never reused.
        let mut snapshot_counter: usize = 0;
        while let Some(message) = recv.blocking_recv() {
            match message.req {
                Request::Close => {
                    // Acknowledge, stop accepting further requests, and exit.
                    message.resp_channel.send(Response::OK).ok();
                    recv.close();
                    return;
                }
                Request::Put { key, val } => {
                    let ok = db.put(&key, &val);
                    send_response(message.resp_channel, ok);
                }
                Request::Delete { key } => {
                    let ok = db.delete(&key);
                    send_response(message.resp_channel, ok);
                }
                Request::Write { batch, sync } => {
                    let ok = db.write(batch, sync);
                    send_response(message.resp_channel, ok);
                }
                Request::Flush => {
                    let ok = db.flush();
                    send_response(message.resp_channel, ok);
                }
                Request::GetAt { snapshot, key } => {
                    // Resolve the client-visible id to the stored snapshot.
                    let snapshot_id = snapshot.0;
                    if let Some(snapshot) = snapshots.get(&snapshot_id) {
                        let ok = db.get_at(&snapshot, &key);
                        match ok {
                            Err(e) => {
                                message.resp_channel.send(Response::Error(e)).ok();
                            }
                            Ok(v) => {
                                message.resp_channel.send(Response::Value(v)).ok();
                            }
                        };
                    } else {
                        // An unknown id can only happen if client and server
                        // state diverged, hence "this is a bug".
                        message
                            .resp_channel
                            .send(Response::Error(Status {
                                code: StatusCode::AsyncError,
                                err: "Unknown snapshot reference: this is a bug".to_string(),
                            }))
                            .ok();
                    }
                }
                Request::Get { key } => {
                    // Unlike GetAt, this result is wrapped directly: there is
                    // no error branch on this path.
                    let r = db.get(&key);
                    message.resp_channel.send(Response::Value(r)).ok();
                }
                Request::GetSnapshot => {
                    snapshots.insert(snapshot_counter, db.get_snapshot());
                    let sref = SnapshotRef(snapshot_counter);
                    snapshot_counter += 1;
                    message.resp_channel.send(Response::Snapshot(sref)).ok();
                }
                Request::DropSnapshot { snapshot } => {
                    // Removing the map entry releases the server-side snapshot.
                    snapshots.remove(&snapshot.0);
                    send_response(message.resp_channel, Ok(()));
                }
                Request::CompactRange { from, to } => {
                    let ok = db.compact_range(&from, &to);
                    send_response(message.resp_channel, ok);
                }
            }
        }
    }
}
fn send_response(ch: oneshot::Sender<Response>, result: Result<()>) {
if let Err(e) = result {
ch.send(Response::Error(e)).ok();
} else {
ch.send(Response::OK).ok();
}
}
| true |
35e0b54b4037574ec6d98644f5f8c7d0568c401c
|
Rust
|
Joxx0r/RustRevEngine
|
/src/misc/input.rs
|
UTF-8
| 2,512 | 3.078125 | 3 |
[] |
no_license
|
use crate::math::vec::Vec2;
use glfw::{Key, Action, MouseButton};
use crate::misc::camera::Camera;
use crate::misc::camera::Camera_Movement::*;
// Per-frame snapshot of the keyboard/mouse state read from GLFW.
pub struct input_state {
    // True while GLFW mouse button 1 is held.
    pub left_mouse_button_down: bool,
    // Cursor position in window coordinates (from glfw's get_cursor_pos).
    pub mouse_position: Vec2,
    // WASD movement keys, sampled once per frame.
    pub forward_key_down: bool,
    pub back_key_down: bool,
    pub left_key_down: bool,
    pub right_key_down: bool,
    // Escape key; used to request window close.
    pub esc_button_down: bool
}
impl input_state {
    /// Builds a state with every flag cleared and the cursor at the origin.
    pub fn default() -> input_state {
        input_state {
            left_mouse_button_down: false,
            mouse_position: Vec2::default(),
            forward_key_down: false,
            back_key_down: false,
            left_key_down: false,
            right_key_down: false,
            esc_button_down: false,
        }
    }
}
pub fn calculate_input(window: &mut glfw::Window) -> input_state {
let mut state: input_state = input_state::default();
state.forward_key_down = window.get_key(Key::W) == Action::Press;
state.back_key_down = window.get_key(Key::S) == Action::Press;
state.left_key_down = window.get_key(Key::A) == Action::Press;
state.right_key_down = window.get_key(Key::D) == Action::Press;
state.esc_button_down = window.get_key(Key::Escape) == Action::Press;
state.left_mouse_button_down = window.get_mouse_button(MouseButton::Button1) == Action::Press;
state.mouse_position = Vec2::new_tuple_f64(window.get_cursor_pos());
state
}
// Applies one frame of input: close-on-escape, WASD camera movement, and
// mouse-drag look. `unsafe` because it reads/writes function-local
// `static mut` drag state (below).
pub unsafe fn process_input(window: &mut glfw::Window, delta_time: f32, camera: &mut Camera, state:input_state) {
    if state.esc_button_down {
        window.set_should_close(true)
    }
    if state.forward_key_down {
        camera.ProcessKeyboard(FORWARD, delta_time);
    }
    if state.back_key_down {
        camera.ProcessKeyboard(BACKWARD, delta_time);
    }
    if state.left_key_down {
        camera.ProcessKeyboard(LEFT, delta_time);
    }
    if state.right_key_down {
        camera.ProcessKeyboard( RIGHT, delta_time);
    }
    // NOTE(review): `static mut` keeps drag state across calls but is not
    // thread-safe; presumably fine for a single-threaded render loop — confirm.
    static mut prev_frame_mouse_button_down: bool = false;
    static mut old_mouse_position:Vec2 = Vec2::default();
    if(state.left_mouse_button_down) {
        // Only produce a look delta from the second held frame onward; the
        // first press frame just records the starting cursor position.
        if(prev_frame_mouse_button_down) {
            // Y is negated so dragging up looks up (screen Y grows downward).
            let delta = Vec2::new_tuple_f32((state.mouse_position.x - old_mouse_position.x, -1.0 * (state.mouse_position.y - old_mouse_position.y)));
            camera.ProcessMouseMovement(delta.x, delta.y, false);
        }
        old_mouse_position = state.mouse_position;
    }
    prev_frame_mouse_button_down = state.left_mouse_button_down;
}
| true |
dca7e0facdb300a9c8f85b455b55f5bd534dd19a
|
Rust
|
hubris-lang/hubris
|
/src/hubris_rt/src/lib.rs
|
UTF-8
| 493 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
use std::rc::Rc;
use std::mem::transmute;
struct ObjValue {
    // Type-erased pointer to a leaked `Box<T>`; the concrete `T` is only
    // known to the callers of `Obj::from`/`Obj::unbox`.
    ptr: *mut usize,
}
// Reference-counted, type-erased value handle; cloning shares the pointee.
pub struct Obj(Rc<ObjValue>);
impl Obj {
    /// Wraps an arbitrary value in a reference-counted, type-erased handle.
    ///
    /// The value is moved to the heap and the box leaked into a raw pointer.
    /// NOTE(review): no `Drop` impl for `ObjValue` is visible in this file,
    /// so the allocation appears to leak when the last `Obj` is dropped —
    /// confirm whether that is intentional for this runtime.
    pub fn from<T>(t: T) -> Obj {
        unsafe {
            let boxed_val = Box::new(t);
            let val = ObjValue {
                ptr: transmute(Box::into_raw(boxed_val)),
            };
            Obj(Rc::new(val))
        }
    }
    /// Reinterprets the stored pointer as `&T`.
    ///
    /// NOTE(review): nothing checks that `T` matches the type passed to
    /// `from`; requesting a different `T` is undefined behavior. The borrow
    /// is tied to `&self`, which keeps the `Rc` (and the pointee) alive.
    pub fn unbox<T>(&self) -> &T {
        let ptr: *mut usize = self.0.ptr;
        unsafe { transmute(ptr) }
    }
}
| true |
00c5e51dfc68f6afdf5222d95fb4ccfe2d728d48
|
Rust
|
bollo35/cryptopals
|
/src/bin/p040.rs
|
UTF-8
| 3,181 | 3.09375 | 3 |
[] |
no_license
|
extern crate ooga;
use ooga::rsa::Rsa;
extern crate openssl;
use openssl::bn::{BigNum, BigNumContext};
use std::ops::{Add, Div, Mul, Sub};
// Cryptopals challenge 40: e=3 RSA broadcast attack. The same message is
// encrypted under three different public keys; CRT combines the three
// ciphertexts into m^3 mod n0*n1*n2, whose integer cube root recovers m.
fn main() {
    let rsa0 = Rsa::new();
    let rsa1 = Rsa::new();
    let rsa2 = Rsa::new();
    let msg = "Never gonna give you up! Never gonna let you down!".to_string();
    let ct0 = BigNum::from_slice(&rsa0.enc_str(msg.clone()).unwrap()).unwrap();
    let ct1 = BigNum::from_slice(&rsa1.enc_str(msg.clone()).unwrap()).unwrap();
    let ct2 = BigNum::from_slice(&rsa2.enc_str(msg.clone()).unwrap()).unwrap();
    // Debug aid: cube root of a single raw ciphertext (not part of the attack).
    let og = bncbrt(BigNum::from_slice(&ct0.to_vec()).unwrap());
    println!("og: {:?}", String::from_utf8(og.to_vec()));
    println!("C0: {:?}", ct0);
    println!("C1: {:?}", ct1);
    println!("C2: {:?}", ct2);
    println!();
    println!();
    println!();
    let (e0, n0) = rsa0.get_pubkey();
    let (e1, n1) = rsa1.get_pubkey();
    let (e2, n2) = rsa2.get_pubkey();
    // Sanity checks: the attack requires three distinct, coprime moduli.
    println!("n0 == n1: {}", n0 == n1);
    println!("n2 == n1: {}", n2 == n1);
    println!("n0 == n2: {}", n0 == n2);
    println!("e0: {}", e0);
    println!("e1: {}", e1);
    println!("e2: {}", e2);
    // Standard CRT coefficients: N_i is the product of the other two moduli.
    // N0 = n1 * n2
    let N0 = n1.mul(&n2);
    // N1 = n0 * n2
    let N1 = n0.mul(&n2);
    // N2 = n0 * n1
    let N2 = n0.mul(&n1);
    let mut bnctx = BigNumContext::new().unwrap();
    // a0 = invmod(N0, n0)
    let mut a0 = BigNum::new().unwrap();
    a0.mod_inverse(&N0, &n0, &mut bnctx).unwrap();
    // a1 = invmod(N1, n1)
    let mut a1 = BigNum::new().unwrap();
    a1.mod_inverse(&N1, &n1, &mut bnctx).unwrap();
    // a2 = invmod(N2, n2)
    let mut a2 = BigNum::new().unwrap();
    a2.mod_inverse(&N2, &n2, &mut bnctx).unwrap();
    // p0 = c0 * N0 * a0
    let p0 = ct0.mul(&N0).mul(&a0);
    // p1 = c1 * N1 * a1
    let p1 = ct1.mul(&N1).mul(&a1);
    // p2 = c2 * N2 * a2
    let p2 = ct2.mul(&N2).mul(&a2);
    // In the instructions, they say that you don't need to take the result
    // modulo N_012 but that doesn't make sense.
    // The interesting thing is that if you have a message that's smaller
    // than N, and you know e = 3, you could just take the cubed root
    // without the chinese remainder theorem. I don't quite get the point
    // of this exercise. Ah, I guess the only thing I can think of is if
    // you have a message that gets broken into chunks, then you could
    // do this still? I don't know.
    let mut m_e = BigNum::new().unwrap();
    m_e.mod_add(&p0.add(&p1), &p2, &n0.mul(&n1).mul(&n2), &mut bnctx).unwrap();
    println!("m^e: {}", m_e);
    let m = bncbrt(BigNum::from_slice(&m_e.to_vec()).unwrap());
    let m_3 = m.mul(&m).mul(&m);
    // NOTE(review): this compares m against m_e; the intended check looks
    // like `m_3 != m_e` (root cubed vs. combined value) — verify.
    if m != m_e {
        println!("m_e - m'_3 = {}", m_e.sub(&m_3));
    }
    let msg = String::from_utf8(m.to_vec());
    println!("Recovered message: {:?}", msg);
}
// Integer cube root of `n` via binary search.
//
// Returns the exact root when `n` is a perfect cube, otherwise the floor of
// the cube root. Fixes a non-termination bug: the old loop ran while
// `cube != n && high != low`, but once `low` crossed above `high` the guess
// (and its cube) stopped changing while `high != low` stayed true, so any
// non-perfect-cube input (e.g. the raw ciphertext passed in `main`) spun
// forever. Bounding with `low <= high` guarantees termination.
fn bncbrt(n: BigNum) -> BigNum {
    let one = BigNum::from_u32(1).unwrap();
    let two = BigNum::from_u32(2).unwrap();
    // Binary search over [0, n].
    let mut high = BigNum::from_slice(&n.to_vec()).unwrap();
    let mut low = BigNum::from_u32(0).unwrap();
    while low <= high {
        let guess = high.add(&low).div(&two);
        let cube = guess.mul(&guess).mul(&guess);
        if cube == n {
            return guess;
        } else if cube > n {
            high = guess.sub(&one);
        } else {
            low = guess.add(&one);
        }
    }
    // low > high: `high` now holds floor(cbrt(n)).
    high
}
| true |
9bd1c7e7f17365d5f76e9cea6cf3c85305130196
|
Rust
|
ushkarev/rusty-kms
|
/src/key_store/key/tags.rs
|
UTF-8
| 942 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
use crate::key_store::errors::*;
use crate::key_store::tag::Tag;
use crate::key_store::key::{Key, State};
impl Key {
pub fn add_tag(&mut self, tag: Tag) -> Result<(), AddTagError> {
if let State::PendingDeletion(_) = self.state {
return Err(AddTagError::InvalidState);
}
if let Some(existing_tag) = self.tags.iter_mut().find(|t| t.key() == tag.key()) {
*existing_tag = tag;
} else {
self.tags.push(tag);
}
Ok(())
}
pub fn remove_tag(&mut self, tag_key: &str) -> Result<Tag, RemoveTagError> {
if let State::PendingDeletion(_) = self.state {
return Err(RemoveTagError::InvalidState);
}
// TODO: should removing a non-existant tag fail?
self.tags.iter()
.position(|t| t.key() == tag_key)
.map(|index| self.tags.remove(index))
.ok_or(RemoveTagError::NotFound)
}
}
| true |
9294846647301ee2e68c1e8190d55b981ccf53fd
|
Rust
|
leptonyu/salak.rs
|
/src/lib.rs
|
UTF-8
| 10,594 | 3.671875 | 4 |
[
"MIT"
] |
permissive
|
//! Salak is a multi layered configuration loader and zero-boilerplate configuration parser, with many predefined sources.
//!
//! 1. [About](#about)
//! 2. [Quick Start](#quick-start)
//! 3. [Features](#features)
//! * [Predefined Sources](#predefined-sources)
//! * [Key Convention](#key-convention)
//! * [Value Placeholder Parsing](#value-placeholder-parsing)
//! * [Attributes For Derive](#attributes-for-derive)
//! * [Reload Configuration](#reload-configuration)
//! * [Resource Factory](#resource-factory)
//!
//! ## About
//! `salak` is a multi layered configuration loader with many predefined sources. Also it
//! is a zero-boilerplate configuration parser which provides an auto-derive procedure macro
//! to derive [`FromEnvironment`] so that we can parse configuration structs without any additional codes.
//!
//! ## Quick Start
//! A simple example of `salak`:
//!
//! ```
//! use salak::*;
//!
//! #[derive(Debug, FromEnvironment)]
//! #[salak(prefix = "config")]
//! struct Config {
//! #[salak(default = false)]
//! verbose: bool,
//! optional: Option<String>,
//! #[salak(name = "val")]
//! value: i64,
//! }
//! let env = Salak::builder()
//! .set("config.val", "2021")
//! .build()
//! .unwrap();
//! let config = env.get::<Config>().unwrap();
//! assert_eq!(2021, config.value);
//! assert_eq!(None, config.optional);
//! assert_eq!(false, config.verbose);
//! ```
//!
//! ## Features
//!
//! #### Predefined Sources
//! Predefined sources has the following order, [`Salak`] will find by sequence of these orders,
//! if the property with specified key is found at the current source, than return immediately. Otherwise,
//! it will search the next source.
//!
//! 1. Random source provides a group of keys can return random values.
//! * `random.u8`
//! * `random.u16`
//! * `random.u32`
//! * `random.u64`
//! * `random.u128`
//! * `random.usize`
//! * `random.i8`
//! * `random.i16`
//! * `random.i32`
//! * `random.i64`
//! * `random.i128`
//! * `random.isize`
//! 2. Custom arguments source. [`SalakBuilder::set()`] can set a single kv,
//! and [`SalakBuilder::set_args()`] can set a group of kvs.
//! 3. System environment source. Implemented by [`source::system_environment`].
//! 4. Profile specified file source, eg. `app-dev.toml`, supports reloading.
//! 5. No profile file source, eg. `app.toml`, supports reloading.
//! 6. Custom sources, which can register by [`Salak::register()`].
//!
//! #### Key Convention
//! Key is used for search configuration from [`Environment`], normally it is represented by string.
//! Key is a group of SubKey separated by dot(`.`), and SubKey is a name or a name followed by index.
//! 1. SubKey Format (`[a-z][_a-z0-9]+(\[[0-9]+\])*`)
//! * `a`
//! * `a0`
//! * `a_b`
//! * `a[0]`
//! * `a[0][0]`
//! 2. Key Format (`SubKey(\.SubKey)*`)
//! * `a`
//! * `a.b`
//! * `a.val[0]`
//! * `a_b[0]`
//!
//! #### Value Placeholder Parsing
//! 1. Placeholder Format
//! * `${key}` => Get value of `key`.
//! * `${key:default}` => Get value of `key`, if not exists return `default`.
//! 2. Escape Format
//! * `\$\{key\}` => Return `${key}`.
//! * `$`, `\`, `{`, `}` must use escape format.
//!
//! #### Attributes For Derive
//! `salak` supports some attributes for automatically derive [`FromEnvironment`].
//! All attributes have format `#[salak(..)]`, eg. `#[salak(default = "default value")]`.
//! 1. Struct Header Attribute.
//! * `#[salak(prefix = "salak.application")]`, has this attr will auto implement [`PrefixedFromEnvironment`].
//! 2. Struct Field Attribute.
//! * `#[salak(default = "value")]`, this attr can specify default value.
//! * `#[salak(name = "key")]`, this attr can specify property key, default convension is use field name.
//! * `#[salak(desc = "Field Description")]`, this attr can be describe this property.
//!
//! #### Reload Configuration
//! `salak` supports reload configurations. Since in rust mutable
//! and alias can't be used together, here we introduce a wrapper
//! [`wrapper::IORef`] for updating values when reloading.
//!
//! #### Resource Factory
//! [`Resource`] defines a standard way to create instance. [`Factory`] provides functions to initialize resource
//! and cache resource. Please refer to [salak_factory](https://docs.rs/salak_factory) for resource usage.
//! Feature 'app' should be open for this feature.
//!
#![cfg_attr(docsrs, feature(doc_cfg))]
#![warn(
anonymous_parameters,
missing_copy_implementations,
missing_debug_implementations,
missing_docs,
nonstandard_style,
rust_2018_idioms,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unused_extern_crates,
unused_qualifications,
variant_size_differences
)]
use parking_lot::Mutex;
#[cfg(feature = "derive")]
use crate::derive::KeyDesc;
#[cfg(feature = "derive")]
mod derive;
#[cfg(feature = "derive")]
#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
pub use crate::derive::{
AutoDeriveFromEnvironment, DescFromEnvironment, PrefixedFromEnvironment, SalakDescContext,
};
use raw_ioref::IORefT;
/// Auto derive [`FromEnvironment`] for struct.
#[cfg(feature = "derive")]
#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
pub use salak_derive::FromEnvironment;
/// Auto derive [`Service`] for struct.
#[cfg(all(feature = "derive", feature = "app"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "derive", feature = "app"))))]
pub use salak_derive::Service;
use source_raw::PropertyRegistryInternal;
#[cfg(feature = "args")]
#[cfg_attr(docsrs, doc(cfg(feature = "args")))]
mod args;
#[cfg(feature = "args")]
#[cfg_attr(docsrs, doc(cfg(feature = "args")))]
pub use crate::args::AppInfo;
mod err;
mod raw;
use crate::raw::SubKey;
pub use crate::raw::{IsProperty, Property};
mod raw_ioref;
mod raw_vec;
use crate::env::PREFIX;
pub use crate::env::{Salak, SalakBuilder};
mod env;
mod raw_enum;
pub use crate::err::PropertyError;
pub use crate::raw_enum::EnumProperty;
mod source_map;
#[cfg(feature = "rand")]
#[cfg_attr(docsrs, doc(cfg(feature = "rand")))]
mod source_rand;
mod source_raw;
#[cfg(feature = "toml")]
#[cfg_attr(docsrs, doc(cfg(feature = "toml")))]
mod source_toml;
#[cfg(feature = "yaml")]
#[cfg_attr(docsrs, doc(cfg(feature = "yaml")))]
mod source_yaml;
use crate::source::Key;
use crate::source::SubKeys;
#[cfg(feature = "app")]
#[cfg_attr(docsrs, doc(cfg(feature = "app")))]
mod app;
#[cfg(feature = "app")]
#[cfg_attr(docsrs, doc(cfg(feature = "app")))]
pub use crate::app::*;
#[cfg(test)]
#[macro_use(quickcheck)]
extern crate quickcheck_macros;
/// Salak wrapper for configuration parsing.
///
/// Wrapper can determine extra behavior for parsing.
/// Such as check empty of vec or update when reloading.
pub mod wrapper {
    // Re-exports only: `IORef` lets values be updated on reload,
    // `NonEmptyVec` rejects empty lists at parse time.
    pub use crate::raw_ioref::IORef;
    pub use crate::raw_vec::NonEmptyVec;
}
/// Salak sources.
///
/// This mod exports all pub sources.
pub mod source {
    // `from_args` stays crate-private; only the key types and the built-in
    // sources below are part of the public surface.
    #[cfg(feature = "args")]
    #[cfg_attr(docsrs, doc(cfg(feature = "args")))]
    pub(crate) use crate::args::from_args;
    pub use crate::raw::Key;
    pub use crate::raw::SubKeys;
    pub use crate::source_map::system_environment;
    pub use crate::source_map::HashMapSource;
}
// Crate-internal shorthands: fallible value result and fallible unit result.
pub(crate) type Res<T> = Result<T, PropertyError>;
pub(crate) type Void = Res<()>;
/// A property source defines how to load properties.
/// `salak` has some predefined sources, user can
/// provide custom source by implementing this trait.
///
/// Sources provided by `salak`.
///
/// * hashmap source
/// * std::env source
/// * toml source
/// * yaml source
pub trait PropertySource: Send + Sync {
    /// [`PropertySource`] name.
    fn name(&self) -> &str;
    /// Get property by key.
    fn get_property(&self, key: &Key<'_>) -> Option<Property<'_>>;
    /// Get all subkeys with given key.
    ///
    /// Subkeys are keys without dot('.').
    /// This method is unstable, and will be simplified by hidding
    /// Key and SubKeys.
    fn get_sub_keys<'a>(&'a self, key: &Key<'_>, sub_keys: &mut SubKeys<'a>);
    /// Check whether the [`PropertySource`] is empty.
    /// Empty source will be ignored when registering to `salak`.
    fn is_empty(&self) -> bool;
    /// Reload source, if nothing changes, then return none.
    #[inline]
    fn reload_source(&self) -> Res<Option<Box<dyn PropertySource>>> {
        // Default: the source is static — only reloadable sources (e.g.
        // file-backed ones) need to override this.
        Ok(None)
    }
}
/// Environment defines interface for getting values, and reloading
/// configurations.
///
/// The implementor of this trait is [`Salak`].
pub trait Environment {
    /// Get value by key.
    /// * `key` - Configuration key.
    ///
    /// Require means is if the value `T` is not found,
    /// then error will be returned. But if you try to get
    /// `Option<T>`, then not found will return `None`.
    fn require<T: FromEnvironment>(&self, key: &str) -> Res<T>;
    /// Reload configuration. If reloading is completed,
    /// all values wrapped by [`wrapper::IORef`] will be updated.
    ///
    /// Currently, this feature is unstable, the returned bool
    /// value means reloading is completed without error.
    fn reload(&self) -> Res<bool>;
    #[cfg(feature = "derive")]
    #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
    #[inline]
    /// Get value with predefined key.
    ///
    /// [`PrefixedFromEnvironment`] can be auto derives by
    /// [`salak_derive::FromEnvironment`] macro. It provides
    /// a standard key for getting value `T`.
    fn get<T: PrefixedFromEnvironment>(&self) -> Res<T> {
        // Convenience default: delegates to `require` using the type's
        // derive-supplied prefix as the key.
        self.require::<T>(T::prefix())
    }
}
/// Context for implementing [`FromEnvironment`].
#[allow(missing_debug_implementations)]
pub struct SalakContext<'a> {
    // Registry used to resolve property lookups.
    registry: &'a PropertyRegistryInternal<'a>,
    // IORef values registered so they can be refreshed on reload.
    iorefs: &'a Mutex<Vec<Box<dyn IORefT + Send>>>,
    // Current key; mutable, presumably extended/truncated as nested
    // parsing descends — confirm against the parsing code.
    key: &'a mut Key<'a>,
}
/// Parsing value from environment by [`SalakContext`].
pub trait FromEnvironment: Sized {
    /// Generate object from [`SalakContext`].
    /// * `val` - Property value can be parsed from.
    /// * `env` - Context.
    ///
    /// ```no_run
    /// use salak::*;
    /// pub struct Config {
    ///   key: String
    /// }
    /// impl FromEnvironment for Config {
    ///   fn from_env(
    ///       val: Option<Property<'_>>,
    ///       env: &mut SalakContext<'_>,
    ///   ) -> Result<Self, PropertyError> {
    ///     Ok(Self{
    ///       key: env.require_def("key", None)?,
    ///     })
    ///   }
    /// }
    ///
    /// ```
    // `val` is `None` when no property exists at the current key; the
    // implementor decides whether that is an error or a default.
    fn from_env(val: Option<Property<'_>>, env: &mut SalakContext<'_>) -> Res<Self>;
}
| true |
803d47198803d026358b00492feed23c3c720724
|
Rust
|
trevershick/uni
|
/src/main.rs
|
UTF-8
| 1,356 | 3.03125 | 3 |
[] |
no_license
|
#[macro_use]
extern crate simple_error;
#[macro_use]
extern crate clap;
use std::error::Error;
use std::process::exit;
use std::result::Result;
use clap::App;
use hex;
/// Decodes big-endian UTF-16 bytes into a `String` (despite the name, the
/// input is UTF-16 code-unit bytes, not UTF-8).
///
/// Generalized from the previous exactly-two-byte limit: any positive, even
/// number of bytes is accepted, so multi-unit characters encoded as
/// surrogate pairs (e.g. `D83D DE00` → 😀) now decode too. The original
/// two-byte case behaves exactly as before. Errors on empty or odd-length
/// input and on invalid UTF-16 sequences.
fn utf8_to_utf16(unicode_bytes: Vec<u8>) -> Result<String, Box<dyn Error>> {
    if unicode_bytes.is_empty() || unicode_bytes.len() % 2 != 0 {
        return Err("expected a positive, even number of bytes".into());
    }
    // Fold big-endian byte pairs into UTF-16 code units.
    let units: Vec<u16> = unicode_bytes
        .chunks_exact(2)
        .map(|pair| u16::from_be_bytes([pair[0], pair[1]]))
        .collect();
    String::from_utf16(&units).map_err(Into::into)
}
// CLI entry point: each positional argument is a hex-encoded UTF-16 code
// unit; decoded characters print to stdout, undecodable codes are listed on
// stderr and make the process exit with status 1.
fn main() {
    let matches = App::new("uni")
        .version("1.0.0")
        .about("Convert unicode hex to unicode character")
        .args_from_usage("<hex_vals>... 'A sequence of utf16 hex values, i.e. 30CE B0AB'")
        .get_matches();
    let hex_values = values_t!(matches, "hex_vals", String).unwrap();
    // Collects every argument that failed either hex- or UTF-16-decoding.
    let mut bad: Vec<String> = Vec::new();
    for hex_value in hex_values {
        let decoded = hex::decode(&hex_value);
        match decoded {
            Ok(unicode_bytes) => match utf8_to_utf16(unicode_bytes) {
                Ok(v) => print!("{}", v),
                Err(_) => bad.push(hex_value),
            },
            Err(_) => bad.push(hex_value),
        };
    }
    for b in &bad {
        eprintln!("bad code {}", b);
    }
    if bad.len() > 0 {
        exit(1)
    }
}
| true |
9f50f066cda213a8677e45e00a4d365757896afe
|
Rust
|
embed-rs/stm32f7x6
|
/src/ethernet_mmc/mmccr/mod.rs
|
UTF-8
| 19,766 | 2.921875 | 3 |
[] |
no_license
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of MMCCR taken at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value accumulated by the field proxies before the store.
    bits: u32,
}
impl super::MMCCR {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: reader and writer both start from the current
        // hardware value; the closure's edits are then stored back.
        let current = self.register.get();
        let r = R { bits: current };
        let mut w = W { bits: current };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, writing starts from the documented reset value.
        let mut w = W::reset_value();
        self.register.set(f(&mut w).bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `CR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CRR {
    #[doc = "Reset all counters. Cleared automatically"]
    RESET,
    #[doc = r" Reserved"]
    _Reserved(bool),
}
impl CRR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit_is_set()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // RESET encodes as 1; reserved values carry their raw bit unchanged.
        if let CRR::_Reserved(raw) = *self {
            raw
        } else {
            true
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> CRR {
        if value {
            CRR::RESET
        } else {
            CRR::_Reserved(value)
        }
    }
    #[doc = "Checks if the value of the field is `RESET`"]
    #[inline]
    pub fn is_reset(&self) -> bool {
        *self == CRR::RESET
    }
}
#[doc = "Possible values of the field `CSR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CSRR {
    #[doc = "Counters roll over to zero after reaching the maximum value"]
    DISABLED,
    #[doc = "Counters do not roll over to zero after reaching the maximum value"]
    ENABLED,
}
impl CSRR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit_is_set()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // ENABLED encodes as 1, DISABLED as 0.
        *self == CSRR::ENABLED
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> CSRR {
        if value {
            CSRR::ENABLED
        } else {
            CSRR::DISABLED
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline]
    pub fn is_disabled(&self) -> bool {
        !self.bit()
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline]
    pub fn is_enabled(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `ROR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RORR {
    #[doc = "MMC counters do not reset on read"]
    DISABLED,
    #[doc = "MMC counters reset to zero after read"]
    ENABLED,
}
impl RORR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit_is_set()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // ENABLED encodes as 1, DISABLED as 0.
        *self == RORR::ENABLED
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> RORR {
        if value {
            RORR::ENABLED
        } else {
            RORR::DISABLED
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline]
    pub fn is_disabled(&self) -> bool {
        !self.bit()
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline]
    pub fn is_enabled(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `MCF`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MCFR {
    #[doc = "All MMC counters update normally"]
    UNFROZEN,
    #[doc = "All MMC counters frozen to their current value"]
    FROZEN,
}
impl MCFR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit_is_set()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // FROZEN encodes as 1, UNFROZEN as 0.
        *self == MCFR::FROZEN
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> MCFR {
        if value {
            MCFR::FROZEN
        } else {
            MCFR::UNFROZEN
        }
    }
    #[doc = "Checks if the value of the field is `UNFROZEN`"]
    #[inline]
    pub fn is_unfrozen(&self) -> bool {
        !self.bit()
    }
    #[doc = "Checks if the value of the field is `FROZEN`"]
    #[inline]
    pub fn is_frozen(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `MCP`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MCPR {
    #[doc = "MMC counters will be preset to almost full or almost half. Cleared automatically"]
    PRESET,
    #[doc = r" Reserved"]
    _Reserved(bool),
}
impl MCPR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit_is_set()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // PRESET encodes as 1; reserved values carry their raw bit unchanged.
        if let MCPR::_Reserved(raw) = *self {
            raw
        } else {
            true
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> MCPR {
        if value {
            MCPR::PRESET
        } else {
            MCPR::_Reserved(value)
        }
    }
    #[doc = "Checks if the value of the field is `PRESET`"]
    #[inline]
    pub fn is_preset(&self) -> bool {
        *self == MCPR::PRESET
    }
}
#[doc = "Possible values of the field `MCFHP`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MCFHPR {
    #[doc = "When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0"]
    ALMOSTHALF,
    #[doc = "When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0"]
    ALMOSTFULL,
}
impl MCFHPR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit_is_set()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // ALMOSTFULL encodes as 1, ALMOSTHALF as 0.
        *self == MCFHPR::ALMOSTFULL
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> MCFHPR {
        if value {
            MCFHPR::ALMOSTFULL
        } else {
            MCFHPR::ALMOSTHALF
        }
    }
    #[doc = "Checks if the value of the field is `ALMOSTHALF`"]
    #[inline]
    pub fn is_almost_half(&self) -> bool {
        !self.bit()
    }
    #[doc = "Checks if the value of the field is `ALMOSTFULL`"]
    #[inline]
    pub fn is_almost_full(&self) -> bool {
        self.bit()
    }
}
#[doc = "Values that can be written to the field `CR`"]
pub enum CRW {
    #[doc = "Reset all counters. Cleared automatically"]
    RESET,
}
impl CRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // RESET is the sole variant and always writes a 1.
        true
    }
}
#[doc = r" Proxy"]
pub struct _CRW<'a> {
    w: &'a mut W,
}
impl<'a> _CRW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CRW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Reset all counters. Cleared automatically"]
    #[inline]
    pub fn reset(self) -> &'a mut W {
        self.variant(CRW::RESET)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0 of MMCCR: clear the field, then merge the new value in.
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `CSR`"]
pub enum CSRW {
    #[doc = "Counters roll over to zero after reaching the maximum value"]
    DISABLED,
    #[doc = "Counters do not roll over to zero after reaching the maximum value"]
    ENABLED,
}
impl CSRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // ENABLED writes 1, DISABLED writes 0.
        matches!(*self, CSRW::ENABLED)
    }
}
#[doc = r" Proxy"]
pub struct _CSRW<'a> {
    w: &'a mut W,
}
impl<'a> _CSRW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CSRW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Counters roll over to zero after reaching the maximum value"]
    #[inline]
    pub fn disabled(self) -> &'a mut W {
        self.variant(CSRW::DISABLED)
    }
    #[doc = "Counters do not roll over to zero after reaching the maximum value"]
    #[inline]
    pub fn enabled(self) -> &'a mut W {
        self.variant(CSRW::ENABLED)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1 of MMCCR: clear the field, then merge the new value in.
        const MASK: bool = true;
        const OFFSET: u8 = 1;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `ROR`"]
pub enum RORW {
    #[doc = "MMC counters do not reset on read"]
    DISABLED,
    #[doc = "MMC counters reset to zero after read"]
    ENABLED,
}
impl RORW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // ENABLED writes 1, DISABLED writes 0.
        matches!(*self, RORW::ENABLED)
    }
}
#[doc = r" Proxy"]
pub struct _RORW<'a> {
    w: &'a mut W,
}
impl<'a> _RORW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: RORW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "MMC counters do not reset on read"]
    #[inline]
    pub fn disabled(self) -> &'a mut W {
        self.variant(RORW::DISABLED)
    }
    #[doc = "MMC counters reset to zero after read"]
    #[inline]
    pub fn enabled(self) -> &'a mut W {
        self.variant(RORW::ENABLED)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2 of MMCCR: clear the field, then merge the new value in.
        const MASK: bool = true;
        const OFFSET: u8 = 2;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `MCF`
pub enum MCFW {
    /// All MMC counters update normally
    UNFROZEN,
    /// All MMC counters frozen to their current value
    FROZEN,
}
impl MCFW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // FROZEN writes the bit as 1, UNFROZEN as 0.
        matches!(*self, MCFW::FROZEN)
    }
}
/// Write proxy for the single-bit `MCF` field (bit 3 of the register).
pub struct _MCFW<'a> {
    w: &'a mut W,
}
impl<'a> _MCFW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: MCFW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// All MMC counters update normally
    #[inline]
    pub fn unfrozen(self) -> &'a mut W {
        self.variant(MCFW::UNFROZEN)
    }
    /// All MMC counters frozen to their current value
    #[inline]
    pub fn frozen(self) -> &'a mut W {
        self.variant(MCFW::FROZEN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of MCF within the register.
        const OFFSET: u8 = 3;
        let mask = 1u32 << OFFSET;
        if value {
            self.w.bits |= mask;
        } else {
            self.w.bits &= !mask;
        }
        self.w
    }
}
/// Values that can be written to the field `MCP`
pub enum MCPW {
    /// MMC counters will be preset to almost full or almost half. Cleared automatically
    PRESET,
}
impl MCPW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // PRESET is the only variant and always writes the bit as 1.
        true
    }
}
/// Write proxy for the single-bit `MCP` field (bit 4 of the register).
pub struct _MCPW<'a> {
    w: &'a mut W,
}
impl<'a> _MCPW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: MCPW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// MMC counters will be preset to almost full or almost half. Cleared automatically
    #[inline]
    pub fn preset(self) -> &'a mut W {
        self.variant(MCPW::PRESET)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of MCP within the register.
        const OFFSET: u8 = 4;
        let mask = 1u32 << OFFSET;
        if value {
            self.w.bits |= mask;
        } else {
            self.w.bits &= !mask;
        }
        self.w
    }
}
/// Values that can be written to the field `MCFHP`
pub enum MCFHPW {
    /// When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0
    ALMOSTHALF,
    /// When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0
    ALMOSTFULL,
}
impl MCFHPW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // ALMOSTFULL writes the bit as 1, ALMOSTHALF as 0.
        matches!(*self, MCFHPW::ALMOSTFULL)
    }
}
/// Write proxy for the single-bit `MCFHP` field (bit 5 of the register).
pub struct _MCFHPW<'a> {
    w: &'a mut W,
}
impl<'a> _MCFHPW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: MCFHPW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0
    #[inline]
    pub fn almost_half(self) -> &'a mut W {
        self.variant(MCFHPW::ALMOSTHALF)
    }
    /// When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0
    #[inline]
    pub fn almost_full(self) -> &'a mut W {
        self.variant(MCFHPW::ALMOSTFULL)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of MCFHP within the register.
        const OFFSET: u8 = 5;
        let mask = 1u32 << OFFSET;
        if value {
            self.w.bits |= mask;
        } else {
            self.w.bits &= !mask;
        }
        self.w
    }
}
// Read accessors for the cached register value. Each method extracts one
// single-bit field and converts it into that field's reader enum via `_from`.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0 - CR"]
    #[inline]
    pub fn cr(&self) -> CRR {
        CRR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 1 - CSR"]
    #[inline]
    pub fn csr(&self) -> CSRR {
        CSRR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 2 - ROR"]
    #[inline]
    pub fn ror(&self) -> RORR {
        RORR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 2;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 3 - MCF"]
    #[inline]
    pub fn mcf(&self) -> MCFR {
        MCFR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 3;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 4 - MCP"]
    #[inline]
    pub fn mcp(&self) -> MCPR {
        MCPR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 4;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 5 - MCFHP"]
    #[inline]
    pub fn mcfhp(&self) -> MCFHPR {
        MCFHPR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 5;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
}
// Write-side builder for the register. `reset_value` starts from the
// documented reset state (all zeros); each field accessor returns a one-bit
// write proxy that mutates `self.bits` in place and hands the writer back.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    // Unsafe because arbitrary bit patterns bypass the typed field proxies.
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - CR"]
    #[inline]
    pub fn cr(&mut self) -> _CRW {
        _CRW { w: self }
    }
    #[doc = "Bit 1 - CSR"]
    #[inline]
    pub fn csr(&mut self) -> _CSRW {
        _CSRW { w: self }
    }
    #[doc = "Bit 2 - ROR"]
    #[inline]
    pub fn ror(&mut self) -> _RORW {
        _RORW { w: self }
    }
    #[doc = "Bit 3 - MCF"]
    #[inline]
    pub fn mcf(&mut self) -> _MCFW {
        _MCFW { w: self }
    }
    #[doc = "Bit 4 - MCP"]
    #[inline]
    pub fn mcp(&mut self) -> _MCPW {
        _MCPW { w: self }
    }
    #[doc = "Bit 5 - MCFHP"]
    #[inline]
    pub fn mcfhp(&mut self) -> _MCFHPW {
        _MCFHPW { w: self }
    }
}
| true |
e293abe837acee93c3c204016fac4b18bdcef1cf
|
Rust
|
astro/rust-lpc43xx
|
/src/scu/pintsel0/mod.rs
|
UTF-8
| 30,148 | 2.53125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#[doc = r" Value read from the register"]
// Immutable snapshot of the 32-bit PINTSEL0 register; decode fields with the
// accessors on `impl R` below.
pub struct R {
    bits: u32,
}
#[doc = r" Value to write to the register"]
// Staged 32-bit value assembled by the field write proxies before it is
// committed to the hardware register.
pub struct W {
    bits: u32,
}
// Register-level access for PINTSEL0 (pin-interrupt source selection).
impl super::PINTSEL0 {
    #[doc = r" Modifies the contents of the register"]
    // Read-modify-write: read the current hardware value, let the closure
    // edit a copy through the typed `W` writer, then store the result.
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    // Unlike `modify`, this starts from the reset value, so any field not
    // set by the closure is written as its reset state.
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
// Raw INTPIN0 value: the pin number within the GPIO port selected by
// PORTSEL0 (5-bit field, see the mask in `R::intpin0`).
pub struct INTPIN0R {
    bits: u8,
}
impl INTPIN0R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
/// Possible values of the field `PORTSEL0`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PORTSEL0R {
    /// GPIO Port 0
    GPIO_PORT_0,
    /// GPIO Port 1
    GPIO_PORT_1,
    /// GPIO Port 2
    GPIO_PORT_2,
    /// GPIO Port 3
    GPIO_PORT_3,
    /// GPIO Port 4
    GPIO_PORT_4,
    /// GPIO Port 5
    GPIO_PORT_5,
    /// GPIO Port 6
    GPIO_PORT_6,
    /// GPIO Port 7
    GPIO_PORT_7,
}
impl PORTSEL0R {
    /// Value of the field as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        // Variants are declared in port order, so the default discriminant
        // of this fieldless enum is exactly the port number.
        *self as u8
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PORTSEL0R {
        match value {
            0 => PORTSEL0R::GPIO_PORT_0,
            1 => PORTSEL0R::GPIO_PORT_1,
            2 => PORTSEL0R::GPIO_PORT_2,
            3 => PORTSEL0R::GPIO_PORT_3,
            4 => PORTSEL0R::GPIO_PORT_4,
            5 => PORTSEL0R::GPIO_PORT_5,
            6 => PORTSEL0R::GPIO_PORT_6,
            7 => PORTSEL0R::GPIO_PORT_7,
            // The reader masks the field to 3 bits, so 0..=7 is exhaustive.
            _ => unreachable!(),
        }
    }
    /// Checks if the value of the field is `GPIO_PORT_0`
    #[inline]
    pub fn is_gpio_port_0(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_0)
    }
    /// Checks if the value of the field is `GPIO_PORT_1`
    #[inline]
    pub fn is_gpio_port_1(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_1)
    }
    /// Checks if the value of the field is `GPIO_PORT_2`
    #[inline]
    pub fn is_gpio_port_2(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_2)
    }
    /// Checks if the value of the field is `GPIO_PORT_3`
    #[inline]
    pub fn is_gpio_port_3(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_3)
    }
    /// Checks if the value of the field is `GPIO_PORT_4`
    #[inline]
    pub fn is_gpio_port_4(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_4)
    }
    /// Checks if the value of the field is `GPIO_PORT_5`
    #[inline]
    pub fn is_gpio_port_5(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_5)
    }
    /// Checks if the value of the field is `GPIO_PORT_6`
    #[inline]
    pub fn is_gpio_port_6(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_6)
    }
    /// Checks if the value of the field is `GPIO_PORT_7`
    #[inline]
    pub fn is_gpio_port_7(&self) -> bool {
        matches!(*self, PORTSEL0R::GPIO_PORT_7)
    }
}
#[doc = r" Value of the field"]
pub struct INTPIN1R {
bits: u8,
}
impl INTPIN1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = "Possible values of the field `PORTSEL1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PORTSEL1R {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PORTSEL1R::GPIO_PORT_0 => 0,
PORTSEL1R::GPIO_PORT_1 => 1,
PORTSEL1R::GPIO_PORT_2 => 2,
PORTSEL1R::GPIO_PORT_3 => 3,
PORTSEL1R::GPIO_PORT_4 => 4,
PORTSEL1R::GPIO_PORT_5 => 5,
PORTSEL1R::GPIO_PORT_6 => 6,
PORTSEL1R::GPIO_PORT_7 => 7,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PORTSEL1R {
match value {
0 => PORTSEL1R::GPIO_PORT_0,
1 => PORTSEL1R::GPIO_PORT_1,
2 => PORTSEL1R::GPIO_PORT_2,
3 => PORTSEL1R::GPIO_PORT_3,
4 => PORTSEL1R::GPIO_PORT_4,
5 => PORTSEL1R::GPIO_PORT_5,
6 => PORTSEL1R::GPIO_PORT_6,
7 => PORTSEL1R::GPIO_PORT_7,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `GPIO_PORT_0`"]
#[inline]
pub fn is_gpio_port_0(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_0
}
#[doc = "Checks if the value of the field is `GPIO_PORT_1`"]
#[inline]
pub fn is_gpio_port_1(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_1
}
#[doc = "Checks if the value of the field is `GPIO_PORT_2`"]
#[inline]
pub fn is_gpio_port_2(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_2
}
#[doc = "Checks if the value of the field is `GPIO_PORT_3`"]
#[inline]
pub fn is_gpio_port_3(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_3
}
#[doc = "Checks if the value of the field is `GPIO_PORT_4`"]
#[inline]
pub fn is_gpio_port_4(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_4
}
#[doc = "Checks if the value of the field is `GPIO_PORT_5`"]
#[inline]
pub fn is_gpio_port_5(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_5
}
#[doc = "Checks if the value of the field is `GPIO_PORT_6`"]
#[inline]
pub fn is_gpio_port_6(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_6
}
#[doc = "Checks if the value of the field is `GPIO_PORT_7`"]
#[inline]
pub fn is_gpio_port_7(&self) -> bool {
*self == PORTSEL1R::GPIO_PORT_7
}
}
#[doc = r" Value of the field"]
pub struct INTPIN2R {
bits: u8,
}
impl INTPIN2R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = "Possible values of the field `PORTSEL2`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PORTSEL2R {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL2R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PORTSEL2R::GPIO_PORT_0 => 0,
PORTSEL2R::GPIO_PORT_1 => 1,
PORTSEL2R::GPIO_PORT_2 => 2,
PORTSEL2R::GPIO_PORT_3 => 3,
PORTSEL2R::GPIO_PORT_4 => 4,
PORTSEL2R::GPIO_PORT_5 => 5,
PORTSEL2R::GPIO_PORT_6 => 6,
PORTSEL2R::GPIO_PORT_7 => 7,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PORTSEL2R {
match value {
0 => PORTSEL2R::GPIO_PORT_0,
1 => PORTSEL2R::GPIO_PORT_1,
2 => PORTSEL2R::GPIO_PORT_2,
3 => PORTSEL2R::GPIO_PORT_3,
4 => PORTSEL2R::GPIO_PORT_4,
5 => PORTSEL2R::GPIO_PORT_5,
6 => PORTSEL2R::GPIO_PORT_6,
7 => PORTSEL2R::GPIO_PORT_7,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `GPIO_PORT_0`"]
#[inline]
pub fn is_gpio_port_0(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_0
}
#[doc = "Checks if the value of the field is `GPIO_PORT_1`"]
#[inline]
pub fn is_gpio_port_1(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_1
}
#[doc = "Checks if the value of the field is `GPIO_PORT_2`"]
#[inline]
pub fn is_gpio_port_2(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_2
}
#[doc = "Checks if the value of the field is `GPIO_PORT_3`"]
#[inline]
pub fn is_gpio_port_3(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_3
}
#[doc = "Checks if the value of the field is `GPIO_PORT_4`"]
#[inline]
pub fn is_gpio_port_4(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_4
}
#[doc = "Checks if the value of the field is `GPIO_PORT_5`"]
#[inline]
pub fn is_gpio_port_5(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_5
}
#[doc = "Checks if the value of the field is `GPIO_PORT_6`"]
#[inline]
pub fn is_gpio_port_6(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_6
}
#[doc = "Checks if the value of the field is `GPIO_PORT_7`"]
#[inline]
pub fn is_gpio_port_7(&self) -> bool {
*self == PORTSEL2R::GPIO_PORT_7
}
}
#[doc = r" Value of the field"]
pub struct INTPIN3R {
bits: u8,
}
impl INTPIN3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = "Possible values of the field `PORTSEL3`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PORTSEL3R {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PORTSEL3R::GPIO_PORT_0 => 0,
PORTSEL3R::GPIO_PORT_1 => 1,
PORTSEL3R::GPIO_PORT_2 => 2,
PORTSEL3R::GPIO_PORT_3 => 3,
PORTSEL3R::GPIO_PORT_4 => 4,
PORTSEL3R::GPIO_PORT_5 => 5,
PORTSEL3R::GPIO_PORT_6 => 6,
PORTSEL3R::GPIO_PORT_7 => 7,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PORTSEL3R {
match value {
0 => PORTSEL3R::GPIO_PORT_0,
1 => PORTSEL3R::GPIO_PORT_1,
2 => PORTSEL3R::GPIO_PORT_2,
3 => PORTSEL3R::GPIO_PORT_3,
4 => PORTSEL3R::GPIO_PORT_4,
5 => PORTSEL3R::GPIO_PORT_5,
6 => PORTSEL3R::GPIO_PORT_6,
7 => PORTSEL3R::GPIO_PORT_7,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `GPIO_PORT_0`"]
#[inline]
pub fn is_gpio_port_0(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_0
}
#[doc = "Checks if the value of the field is `GPIO_PORT_1`"]
#[inline]
pub fn is_gpio_port_1(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_1
}
#[doc = "Checks if the value of the field is `GPIO_PORT_2`"]
#[inline]
pub fn is_gpio_port_2(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_2
}
#[doc = "Checks if the value of the field is `GPIO_PORT_3`"]
#[inline]
pub fn is_gpio_port_3(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_3
}
#[doc = "Checks if the value of the field is `GPIO_PORT_4`"]
#[inline]
pub fn is_gpio_port_4(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_4
}
#[doc = "Checks if the value of the field is `GPIO_PORT_5`"]
#[inline]
pub fn is_gpio_port_5(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_5
}
#[doc = "Checks if the value of the field is `GPIO_PORT_6`"]
#[inline]
pub fn is_gpio_port_6(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_6
}
#[doc = "Checks if the value of the field is `GPIO_PORT_7`"]
#[inline]
pub fn is_gpio_port_7(&self) -> bool {
*self == PORTSEL3R::GPIO_PORT_7
}
}
#[doc = r" Proxy"]
pub struct _INTPIN0W<'a> {
w: &'a mut W,
}
impl<'a> _INTPIN0W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PORTSEL0`"]
pub enum PORTSEL0W {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL0W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
PORTSEL0W::GPIO_PORT_0 => 0,
PORTSEL0W::GPIO_PORT_1 => 1,
PORTSEL0W::GPIO_PORT_2 => 2,
PORTSEL0W::GPIO_PORT_3 => 3,
PORTSEL0W::GPIO_PORT_4 => 4,
PORTSEL0W::GPIO_PORT_5 => 5,
PORTSEL0W::GPIO_PORT_6 => 6,
PORTSEL0W::GPIO_PORT_7 => 7,
}
}
}
#[doc = r" Proxy"]
pub struct _PORTSEL0W<'a> {
w: &'a mut W,
}
impl<'a> _PORTSEL0W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PORTSEL0W) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "GPIO Port 0"]
#[inline]
pub fn gpio_port_0(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_0)
}
#[doc = "GPIO Port 1"]
#[inline]
pub fn gpio_port_1(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_1)
}
#[doc = "GPIO Port 2"]
#[inline]
pub fn gpio_port_2(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_2)
}
#[doc = "GPIO Port 3"]
#[inline]
pub fn gpio_port_3(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_3)
}
#[doc = "GPIO Port 4"]
#[inline]
pub fn gpio_port_4(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_4)
}
#[doc = "GPIO Port 5"]
#[inline]
pub fn gpio_port_5(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_5)
}
#[doc = "GPIO Port 6"]
#[inline]
pub fn gpio_port_6(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_6)
}
#[doc = "GPIO Port 7"]
#[inline]
pub fn gpio_port_7(self) -> &'a mut W {
self.variant(PORTSEL0W::GPIO_PORT_7)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _INTPIN1W<'a> {
w: &'a mut W,
}
impl<'a> _INTPIN1W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PORTSEL1`"]
pub enum PORTSEL1W {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL1W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
PORTSEL1W::GPIO_PORT_0 => 0,
PORTSEL1W::GPIO_PORT_1 => 1,
PORTSEL1W::GPIO_PORT_2 => 2,
PORTSEL1W::GPIO_PORT_3 => 3,
PORTSEL1W::GPIO_PORT_4 => 4,
PORTSEL1W::GPIO_PORT_5 => 5,
PORTSEL1W::GPIO_PORT_6 => 6,
PORTSEL1W::GPIO_PORT_7 => 7,
}
}
}
#[doc = r" Proxy"]
pub struct _PORTSEL1W<'a> {
w: &'a mut W,
}
impl<'a> _PORTSEL1W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PORTSEL1W) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "GPIO Port 0"]
#[inline]
pub fn gpio_port_0(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_0)
}
#[doc = "GPIO Port 1"]
#[inline]
pub fn gpio_port_1(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_1)
}
#[doc = "GPIO Port 2"]
#[inline]
pub fn gpio_port_2(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_2)
}
#[doc = "GPIO Port 3"]
#[inline]
pub fn gpio_port_3(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_3)
}
#[doc = "GPIO Port 4"]
#[inline]
pub fn gpio_port_4(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_4)
}
#[doc = "GPIO Port 5"]
#[inline]
pub fn gpio_port_5(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_5)
}
#[doc = "GPIO Port 6"]
#[inline]
pub fn gpio_port_6(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_6)
}
#[doc = "GPIO Port 7"]
#[inline]
pub fn gpio_port_7(self) -> &'a mut W {
self.variant(PORTSEL1W::GPIO_PORT_7)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _INTPIN2W<'a> {
w: &'a mut W,
}
impl<'a> _INTPIN2W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PORTSEL2`"]
pub enum PORTSEL2W {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL2W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
PORTSEL2W::GPIO_PORT_0 => 0,
PORTSEL2W::GPIO_PORT_1 => 1,
PORTSEL2W::GPIO_PORT_2 => 2,
PORTSEL2W::GPIO_PORT_3 => 3,
PORTSEL2W::GPIO_PORT_4 => 4,
PORTSEL2W::GPIO_PORT_5 => 5,
PORTSEL2W::GPIO_PORT_6 => 6,
PORTSEL2W::GPIO_PORT_7 => 7,
}
}
}
#[doc = r" Proxy"]
pub struct _PORTSEL2W<'a> {
w: &'a mut W,
}
impl<'a> _PORTSEL2W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PORTSEL2W) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "GPIO Port 0"]
#[inline]
pub fn gpio_port_0(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_0)
}
#[doc = "GPIO Port 1"]
#[inline]
pub fn gpio_port_1(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_1)
}
#[doc = "GPIO Port 2"]
#[inline]
pub fn gpio_port_2(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_2)
}
#[doc = "GPIO Port 3"]
#[inline]
pub fn gpio_port_3(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_3)
}
#[doc = "GPIO Port 4"]
#[inline]
pub fn gpio_port_4(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_4)
}
#[doc = "GPIO Port 5"]
#[inline]
pub fn gpio_port_5(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_5)
}
#[doc = "GPIO Port 6"]
#[inline]
pub fn gpio_port_6(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_6)
}
#[doc = "GPIO Port 7"]
#[inline]
pub fn gpio_port_7(self) -> &'a mut W {
self.variant(PORTSEL2W::GPIO_PORT_7)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _INTPIN3W<'a> {
w: &'a mut W,
}
impl<'a> _INTPIN3W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PORTSEL3`"]
pub enum PORTSEL3W {
#[doc = "GPIO Port 0"]
GPIO_PORT_0,
#[doc = "GPIO Port 1"]
GPIO_PORT_1,
#[doc = "GPIO Port 2"]
GPIO_PORT_2,
#[doc = "GPIO Port 3"]
GPIO_PORT_3,
#[doc = "GPIO Port 4"]
GPIO_PORT_4,
#[doc = "GPIO Port 5"]
GPIO_PORT_5,
#[doc = "GPIO Port 6"]
GPIO_PORT_6,
#[doc = "GPIO Port 7"]
GPIO_PORT_7,
}
impl PORTSEL3W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
PORTSEL3W::GPIO_PORT_0 => 0,
PORTSEL3W::GPIO_PORT_1 => 1,
PORTSEL3W::GPIO_PORT_2 => 2,
PORTSEL3W::GPIO_PORT_3 => 3,
PORTSEL3W::GPIO_PORT_4 => 4,
PORTSEL3W::GPIO_PORT_5 => 5,
PORTSEL3W::GPIO_PORT_6 => 6,
PORTSEL3W::GPIO_PORT_7 => 7,
}
}
}
#[doc = r" Proxy"]
pub struct _PORTSEL3W<'a> {
w: &'a mut W,
}
impl<'a> _PORTSEL3W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PORTSEL3W) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "GPIO Port 0"]
#[inline]
pub fn gpio_port_0(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_0)
}
#[doc = "GPIO Port 1"]
#[inline]
pub fn gpio_port_1(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_1)
}
#[doc = "GPIO Port 2"]
#[inline]
pub fn gpio_port_2(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_2)
}
#[doc = "GPIO Port 3"]
#[inline]
pub fn gpio_port_3(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_3)
}
#[doc = "GPIO Port 4"]
#[inline]
pub fn gpio_port_4(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_4)
}
#[doc = "GPIO Port 5"]
#[inline]
pub fn gpio_port_5(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_5)
}
#[doc = "GPIO Port 6"]
#[inline]
pub fn gpio_port_6(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_6)
}
#[doc = "GPIO Port 7"]
#[inline]
pub fn gpio_port_7(self) -> &'a mut W {
self.variant(PORTSEL3W::GPIO_PORT_7)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 7;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:4 - Pint interrupt 0: Select the pin number within the GPIO port selected by the PORTSEL0 bit in this register."]
#[inline]
pub fn intpin0(&self) -> INTPIN0R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
INTPIN0R { bits }
}
#[doc = "Bits 5:7 - Pin interrupt 0: Select the port for the pin number to be selected in the INTPIN0 bits of this register."]
#[inline]
pub fn portsel0(&self) -> PORTSEL0R {
PORTSEL0R::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 8:12 - Pint interrupt 1: Select the pin number within the GPIO port selected by the PORTSEL1 bit in this register."]
#[inline]
pub fn intpin1(&self) -> INTPIN1R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) as u8
};
INTPIN1R { bits }
}
#[doc = "Bits 13:15 - Pin interrupt 1: Select the port for the pin number to be selected in the INTPIN1 bits of this register."]
#[inline]
pub fn portsel1(&self) -> PORTSEL1R {
PORTSEL1R::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 16:20 - Pint interrupt 2: Select the pin number within the GPIO port selected by the PORTSEL2 bit in this register."]
#[inline]
pub fn intpin2(&self) -> INTPIN2R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) as u8
};
INTPIN2R { bits }
}
#[doc = "Bits 21:23 - Pin interrupt 2: Select the port for the pin number to be selected in the INTPIN2 bits of this register."]
#[inline]
pub fn portsel2(&self) -> PORTSEL2R {
PORTSEL2R::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 24:28 - Pint interrupt 3: Select the pin number within the GPIO port selected by the PORTSEL3 bit in this register."]
#[inline]
pub fn intpin3(&self) -> INTPIN3R {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) as u8
};
INTPIN3R { bits }
}
#[doc = "Bits 29:31 - Pin interrupt 3: Select the port for the pin number to be selected in the INTPIN3 bits of this register."]
#[inline]
pub fn portsel3(&self) -> PORTSEL3R {
PORTSEL3R::_from({
const MASK: u8 = 7;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:4 - Pint interrupt 0: Select the pin number within the GPIO port selected by the PORTSEL0 bit in this register."]
#[inline]
pub fn intpin0(&mut self) -> _INTPIN0W {
_INTPIN0W { w: self }
}
#[doc = "Bits 5:7 - Pin interrupt 0: Select the port for the pin number to be selected in the INTPIN0 bits of this register."]
#[inline]
pub fn portsel0(&mut self) -> _PORTSEL0W {
_PORTSEL0W { w: self }
}
#[doc = "Bits 8:12 - Pint interrupt 1: Select the pin number within the GPIO port selected by the PORTSEL1 bit in this register."]
#[inline]
pub fn intpin1(&mut self) -> _INTPIN1W {
_INTPIN1W { w: self }
}
#[doc = "Bits 13:15 - Pin interrupt 1: Select the port for the pin number to be selected in the INTPIN1 bits of this register."]
#[inline]
pub fn portsel1(&mut self) -> _PORTSEL1W {
_PORTSEL1W { w: self }
}
#[doc = "Bits 16:20 - Pint interrupt 2: Select the pin number within the GPIO port selected by the PORTSEL2 bit in this register."]
#[inline]
pub fn intpin2(&mut self) -> _INTPIN2W {
_INTPIN2W { w: self }
}
#[doc = "Bits 21:23 - Pin interrupt 2: Select the port for the pin number to be selected in the INTPIN2 bits of this register."]
#[inline]
pub fn portsel2(&mut self) -> _PORTSEL2W {
_PORTSEL2W { w: self }
}
#[doc = "Bits 24:28 - Pint interrupt 3: Select the pin number within the GPIO port selected by the PORTSEL3 bit in this register."]
#[inline]
pub fn intpin3(&mut self) -> _INTPIN3W {
_INTPIN3W { w: self }
}
#[doc = "Bits 29:31 - Pin interrupt 3: Select the port for the pin number to be selected in the INTPIN3 bits of this register."]
#[inline]
pub fn portsel3(&mut self) -> _PORTSEL3W {
_PORTSEL3W { w: self }
}
}
| true |
161a56edb3db6f76bd0d42056e768fd9b162ee5d
|
Rust
|
zfzackfrost/string_studio
|
/src/generate/regex_gen.rs
|
UTF-8
| 6,496 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
use rand::distributions::{Distribution, Uniform};
use rand::prelude::*;
use regex_syntax::hir::{self, Hir, HirKind};
use regex_syntax::Parser;
use std::iter::FromIterator;
use encoding::all::UTF_8;
use encoding::{DecoderTrap, EncoderTrap, Encoding};
const MAX_REPEAT: u32 = 100;
// Mutable generation context threaded through every `randomize_*` helper;
// currently it only carries the caller's RNG.
struct RandomizeState<'a, R: Rng> {
    pub rng: &'a mut R,
}
/// Randomizes a regex alternation (`a|b|c`) by generating a string for one
/// uniformly chosen branch.
///
/// Returns `Err(())` when the alternation has no branches.
fn randomize_alternation<R: Rng>(
    rstate: &mut RandomizeState<R>,
    exprs: Vec<Hir>,
) -> Result<String, ()> {
    // `choose` picks one branch directly; the original shuffled the entire
    // vector only to read its first element (O(n) RNG work for an O(1) pick).
    match exprs.choose(rstate.rng) {
        Some(expr) => randomize_for(rstate, expr.kind().clone()),
        None => Err(()),
    }
}
/// Randomizes a word boundary by emitting a single space; the RNG and the
/// boundary kind are not consulted.
fn randomize_word_boundry<R: Rng>(
    _rstate: &mut RandomizeState<R>,
    _wb: hir::WordBoundary,
) -> Result<String, ()> {
    Ok(" ".to_string())
}
/// Randomizes an anchor (`^` / `$`): anchors constrain position, not
/// content, so they contribute nothing to the generated string.
fn randomize_anchor<R: Rng>(
    _rstate: &mut RandomizeState<R>,
    _anchor: hir::Anchor,
) -> Result<String, ()> {
    Ok(String::new())
}
/// Randomizes a regex group `(...)` by generating a string for its inner
/// expression; the grouping itself does not affect generation.
fn randomize_group<R: Rng>(
    rstate: &mut RandomizeState<R>,
    group: hir::Group,
) -> Result<String, ()> {
    randomize_for(rstate, group.hir.kind().clone())
}
/// Randomizes a literal: a Unicode literal produces itself as a one-character
/// string; byte literals are not supported and yield `Err(())`.
fn randomize_literal<R: Rng>(
    _rstate: &mut RandomizeState<R>,
    literal: hir::Literal,
) -> Result<String, ()> {
    if let hir::Literal::Unicode(c) = literal {
        Ok(c.to_string())
    } else {
        Err(())
    }
}
/// Renders a concatenation by rendering each sub-expression in order and
/// appending the results; fails if any sub-expression fails.
fn randomize_concat<R: Rng>(rstate: &mut RandomizeState<R>, exprs: Vec<Hir>) -> Result<String, ()> {
    let mut s = String::new();
    for e in &exprs {
        s += &randomize_for(rstate, e.kind().clone())?;
    }
    Ok(s)
}
/// Renders `h` exactly `n` times and concatenates the results.
/// NOTE(review): each rendering's Err is unwrap()ed, so a failing
/// sub-expression panics here instead of propagating — TODO confirm intended.
fn repeat_exactly<R: Rng>(rstate: &mut RandomizeState<R>, h: Hir, n: u32) -> Result<String, ()> {
    let s = (0..n)
        .map(|_| randomize_for(rstate, h.kind().clone()).unwrap())
        .collect::<Vec<String>>()
        .join("");
    Ok(s)
}
/// Renders `h` a random number of times, at least `n` (as in `x{n,}`).
///
/// The count is drawn uniformly from `n..MAX_REPEAT`. When `n` already
/// reaches `MAX_REPEAT`, that range would be empty and `Uniform::from`
/// would panic (e.g. for the pattern `a{150,}`), so we repeat exactly `n`
/// times instead.
fn repeat_at_least<R: Rng + RngCore>(
    rstate: &mut RandomizeState<R>,
    h: Hir,
    n: u32,
) -> Result<String, ()> {
    // Guard against an empty range: Uniform::from(low..high) panics if low >= high.
    let n = if n >= MAX_REPEAT {
        n
    } else {
        Uniform::from(n..MAX_REPEAT).sample(rstate.rng)
    };
    let s = (0..n)
        .map(|_| randomize_for(rstate, h.kind().clone()).unwrap())
        .collect::<Vec<String>>()
        .join("");
    Ok(s)
}
/// Renders `h` between `mn` and `mx` times inclusive (as in `x{mn,mx}`).
fn repeat_bounded<R: Rng + RngCore>(
    rstate: &mut RandomizeState<R>,
    h: Hir,
    mn: u32,
    mx: u32,
) -> Result<String, ()> {
    // Bump the upper bound so the half-open Uniform range includes `mx`.
    let mx = mx + 1;
    let dist = Uniform::from(mn..mx);
    let n = dist.sample(rstate.rng);
    let s = (0..n)
        .map(|_| randomize_for(rstate, h.kind().clone()).unwrap())
        .collect::<Vec<String>>()
        .join("");
    Ok(s)
}
/// Picks one random character from a unicode character class (e.g. `[a-z]`).
/// Candidates are collected by UTF-8-encoding each range's endpoints and
/// walking the byte range between the two *first* bytes.
/// NOTE(review): this only behaves correctly for single-byte (ASCII)
/// endpoints — multi-byte characters are truncated to their first byte,
/// `e[0] + 1` can overflow for a 0xFF endpoint, and the final `unwrap`
/// panics when no candidates were collected. TODO confirm ASCII-only input.
fn randomize_unicode_class<R: Rng + RngCore>(
    rstate: &mut RandomizeState<R>,
    cls: hir::ClassUnicode,
) -> Result<String, ()> {
    let mut chars: Vec<char> = Vec::new();
    for r in cls.iter() {
        let s = r.start();
        let e = r.end();
        if let (Ok(s), Ok(e)) = (
            UTF_8.encode(&String::from_iter([s].iter()), EncoderTrap::Strict),
            UTF_8.encode(&String::from_iter([e].iter()), EncoderTrap::Strict),
        ) {
            if s.len() > 0 && e.len() > 0 {
                let s = s[0];
                // +1 makes the byte range inclusive of the end byte.
                let e = e[0] + 1;
                for byte in s..e {
                    if let Ok(s) = UTF_8.decode(&[byte], DecoderTrap::Strict) {
                        let c = s.chars().nth(0).unwrap();
                        chars.push(c);
                    }
                }
            }
        }
    }
    Ok(String::from_iter(&[*chars.choose(rstate.rng).unwrap()]))
}
/// Dispatches on the class kind; only unicode classes are supported,
/// byte classes yield Err(()).
fn randomize_class<R: Rng + RngCore>(
    rstate: &mut RandomizeState<R>,
    cls: hir::Class,
) -> Result<String, ()> {
    match cls {
        hir::Class::Unicode(cls) => randomize_unicode_class(rstate, cls),
        _ => Err(()),
    }
}
/// Renders a repetition (`?`, `*`, `+`, `{..}`) by delegating to the
/// matching repeat_* helper with the corresponding bounds.
fn randomize_repetition<R: Rng>(
    rstate: &mut RandomizeState<R>,
    rep: hir::Repetition,
) -> Result<String, ()> {
    let hir = rep.hir;
    match rep.kind {
        // `x?` — zero or one copy.
        hir::RepetitionKind::ZeroOrOne => repeat_bounded(rstate, hir.as_ref().clone(), 0, 1),
        // `x*` — capped at MAX_REPEAT rather than truly unbounded.
        hir::RepetitionKind::ZeroOrMore => {
            repeat_bounded(rstate, hir.as_ref().clone(), 0, MAX_REPEAT)
        }
        // `x+` — at least one, capped at MAX_REPEAT.
        hir::RepetitionKind::OneOrMore => {
            repeat_bounded(rstate, hir.as_ref().clone(), 1, MAX_REPEAT)
        }
        hir::RepetitionKind::Range(range) => match range {
            hir::RepetitionRange::Exactly(n) => repeat_exactly(rstate, hir.as_ref().clone(), n),
            hir::RepetitionRange::AtLeast(n) => repeat_at_least(rstate, hir.as_ref().clone(), n),
            hir::RepetitionRange::Bounded(mn, mx) => {
                repeat_bounded(rstate, hir.as_ref().clone(), mn, mx)
            }
        },
    }
}
/// Central dispatcher: renders one HIR node by forwarding to the matching
/// randomize_* helper. Unsupported node kinds yield Err(()).
fn randomize_for<R: Rng>(rstate: &mut RandomizeState<R>, kind: HirKind) -> Result<String, ()> {
    match kind {
        HirKind::Alternation(exprs) => randomize_alternation(rstate, exprs),
        HirKind::Literal(lit) => randomize_literal(rstate, lit),
        HirKind::Concat(exprs) => randomize_concat(rstate, exprs),
        HirKind::Repetition(rep) => randomize_repetition(rstate, rep),
        HirKind::Group(grp) => randomize_group(rstate, grp),
        HirKind::Class(cls) => randomize_class(rstate, cls),
        HirKind::Anchor(a) => randomize_anchor(rstate, a),
        HirKind::WordBoundary(wb) => randomize_word_boundry(rstate, wb),
        _ => Err(()),
    }
}
/// A generator of random strings matching a regex pattern, backed by the
/// pattern's parsed HIR.
pub struct RegexGen {
    hir: Hir,
}
impl RegexGen {
    /// Parses `pattern`; returns `None` when the pattern is invalid.
    pub fn new(pattern: &str) -> Option<Self> {
        if let Ok(hir) = Parser::new().parse(pattern) {
            Some(Self { hir })
        } else {
            None
        }
    }

    /// The root HIR node of the parsed pattern.
    pub fn kind(&self) -> &HirKind {
        self.hir.kind()
    }

    /// Produces one random string matching the pattern, drawing entropy
    /// from `rng`. Err(()) when the pattern uses unsupported constructs.
    pub fn randomize(&self, rng: &mut impl Rng) -> Result<String, ()> {
        let mut rstate = RandomizeState { rng: rng };
        randomize_for(&mut rstate, self.kind().clone())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use rand_xoshiro::Xoshiro256StarStar;

    // Smoke test: generation from a bounded character-class pattern runs
    // with a fixed seed (the output is printed, not asserted).
    #[test]
    fn hir_randomize_test() {
        let mut rng = Xoshiro256StarStar::seed_from_u64(0);
        let gen = RegexGen::new("([a-zA-Z]){1,3}").unwrap();
        if let Ok(s) = gen.randomize(&mut rng) {
            println!("{}", s);
        }
    }

    // Sanity check that the parser produces the expected HIR for `a|b`.
    #[test]
    fn hir_parser_test() {
        let hir = Parser::new().parse("a|b").unwrap();
        assert_eq!(
            hir,
            Hir::alternation(vec![
                Hir::literal(hir::Literal::Unicode('a')),
                Hir::literal(hir::Literal::Unicode('b')),
            ])
        );
    }
}
| true |
17931b343d778a5308fb58c83b860016ccc33223
|
Rust
|
garciparedes/google-hashcode-2021
|
/src/main.rs
|
UTF-8
| 6,015 | 3.28125 | 3 |
[] |
no_license
|
use std::io::prelude::*;
use std::io;
use std::collections::{HashMap, HashSet};
/// Entry point: reads the problem from stdin, builds the traffic-light
/// schedule, and writes it to stdout in the contest format.
fn main() -> io::Result<()> {
    let input = read_input()?;
    let mut solver = Solver::from_str(&input);
    // println!("{:?}", solver);
    let solution = solver.solve();
    write_output(solution);
    return Ok(());
}
/// Parsed problem instance plus working state for producing a schedule.
#[derive(Debug)]
struct Solver {
    // Simulation duration D from the header.
    max_duration: usize,
    // Bonus F awarded per car that finishes in time.
    bonus_points: usize,
    // intersection id -> names of the streets that end at it.
    graph: HashMap<usize, HashSet<String>>,
    // street name -> street data.
    streets: HashMap<String, Street>,
    // The cars' routes.
    paths: HashSet<Path>,
}
impl Solver {
    /// Parses the whole problem text. Line 0 is the header `D I S V F`;
    /// the next S lines are streets and the following V lines are car paths.
    fn from_str(input: &str) -> Self {
        let lines = input
            .trim()
            .split('\n')
            .collect::<Vec<_>>();
        let header = lines[0].split_whitespace().map(|v| v.parse::<usize>().unwrap()).collect::<Vec<_>>();
        // header[1] (the intersection count) is not needed to build the solver.
        let (d, _, s, v, f) = (header[0], header[1], header[2], header[3], header[4]);
        let mut streets = HashMap::new();
        for k in 1..1 + s {
            let street = Street::from_str(lines[k]);
            streets.insert(street.name.clone(), street);
        }
        // Index incoming streets by the intersection they lead into.
        let mut graph = HashMap::new();
        for street in streets.values() {
            graph.entry(street.to).or_insert_with(HashSet::new).insert(street.name.clone());
        }
        let mut paths = HashSet::new();
        for k in (1 + s)..(1 + s + v) {
            let path = Path::from_str(lines[k]);
            paths.insert(path);
        }
        return Self::new(d, f, streets, graph, paths);
    }

    fn new(
        max_duration: usize,
        bonus_points: usize,
        streets: HashMap<String, Street>,
        graph: HashMap<usize, HashSet<String>>,
        paths: HashSet<Path>
    ) -> Self {
        Self {
            max_duration: max_duration,
            bonus_points: bonus_points,
            streets: streets,
            graph: graph,
            paths: paths,
        }
    }

    /// Builds the schedule. Pass 1 walks every path and records, per street,
    /// how many cars use it and at what cumulative times. Pass 2 schedules
    /// each intersection: unused streets get no green time; used streets are
    /// sorted by descending transit time and streets with equal transit share
    /// a cycle length, each drop in transit starting a new, longer cycle.
    fn solve(&mut self) -> Solution {
        let mut solution = Solution::new();
        for path in &self.paths {
            let mut duration = 0;
            for street in &path.streets {
                duration += self.streets.get(street).unwrap().transit;
                self.streets.get_mut(street).unwrap().expected_visits.push(duration);
                self.streets.get_mut(street).unwrap().visits += 1;
            }
        }
        for (intersection_id, streets) in &self.graph {
            // Keep only the incoming streets that at least one car uses.
            let mut streets: Vec<_> = streets
                .iter()
                .clone()
                .filter_map(|name| {
                    let street = self.streets.get(name).unwrap();
                    if street.visits == 0 {
                        return None;
                    }
                    return Some(street);
                })
                .collect();
            if streets.is_empty() {
                continue;
            }
            streets.sort_unstable_by_key(|street| cmp::Reverse(street.transit));
            let mut incoming = Vec::new();
            let mut cycle = 1;
            let mut last = streets[0].transit;
            for street in streets {
                if last > street.transit {
                    last = street.transit;
                    cycle += 1;
                }
                incoming.push((street.name.clone(), cycle));
            }
            let intersection = Intersection::new(*intersection_id, incoming);
            solution.insert(intersection);
        }
        return solution;
    }
}
use std::cmp;
/// A directed street running from intersection `from` to intersection `to`,
/// taking `transit` seconds to traverse. `visits` / `expected_visits` are
/// bookkeeping fields filled in by the solver.
#[derive(Debug)]
struct Street {
    from: usize,
    to: usize,
    name: String,
    transit: usize,
    visits: usize,
    expected_visits: Vec<usize>,
}

impl Street {
    /// Parses one input line of the form `<from> <to> <name> <transit>`.
    /// Panics on a malformed line, like the original index-based parser.
    fn from_str(raw: &str) -> Self {
        let mut fields = raw.trim().split_whitespace();
        let from = fields.next().unwrap().parse().unwrap();
        let to = fields.next().unwrap().parse().unwrap();
        let name = fields.next().unwrap().to_string();
        let transit = fields.next().unwrap().parse().unwrap();
        Self::new(from, to, name, transit)
    }

    /// Creates a street with empty visit bookkeeping.
    fn new(from: usize, to: usize, name: String, transit: usize) -> Self {
        Self {
            from,
            to,
            name,
            transit,
            visits: 0,
            expected_visits: Vec::new(),
        }
    }
}
/// The ordered list of street names one car drives through.
#[derive(Debug, PartialEq, Eq, Hash)]
struct Path {
    streets: Vec<String>,
}

impl Path {
    /// Parses one input line: the first token (the street count) is skipped;
    /// the remaining tokens are the street names.
    fn from_str(raw: &str) -> Self {
        let streets = raw
            .trim()
            .split_whitespace()
            .skip(1)
            .map(String::from)
            .collect();
        Self::new(streets)
    }

    fn new(streets: Vec<String>) -> Self {
        Self { streets }
    }
}
/// The full output: one light schedule per scheduled intersection.
#[derive(Debug)]
struct Solution {
    intersections: HashSet<Intersection>,
}

impl Solution {
    fn new() -> Self {
        Self { intersections: HashSet::new() }
    }

    fn insert(&mut self, intersection: Intersection) {
        self.intersections.insert(intersection);
    }

    // Contest output format: the intersection count on the first line,
    // followed by each intersection's own block.
    fn to_string(&self) -> String {
        let mut ans = String::new();
        ans.push_str(&self.intersections.len().to_string());
        for intersection in &self.intersections {
            ans.push('\n');
            ans.push_str(&intersection.to_string());
        }
        return ans;
    }
}
/// A traffic-light schedule for one intersection: for every incoming street,
/// how long its light stays green.
#[derive(Debug, PartialEq, Eq, Hash)]
struct Intersection {
    id: usize,
    incoming: Vec<(String, usize)>
}

impl Intersection {
    fn new(id: usize, incoming: Vec<(String, usize)>) -> Self {
        Self { id, incoming }
    }

    /// Renders this intersection in the contest output format:
    /// the id, then the number of incoming streets, then one
    /// `<street> <green>` line per street — all newline-separated.
    fn to_string(&self) -> String {
        let mut lines = vec![self.id.to_string(), self.incoming.len().to_string()];
        for (street, green) in &self.incoming {
            lines.push(format!("{} {}", street, green));
        }
        lines.join("\n")
    }
}
/// Reads all of stdin into a single String.
fn read_input() -> io::Result<String> {
    let mut buffer = String::new();
    io::stdin().read_to_string(&mut buffer)?;
    return Ok(buffer);
}
/// Prints the solution to stdout in the contest output format.
fn write_output(solution: Solution) {
    println!("{}", solution.to_string());
}
| true |
b97dde5b6f15364dbfc9169474aebd1212babb74
|
Rust
|
k124k3n/competitive-programming-answer
|
/codesignal/largestNumber.rs
|
UTF-8
| 167 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
/// Returns the largest `n`-digit decimal number (9, 99, 999, ...).
/// `n == 0` yields 0; `n` above 9 overflows `i32` (panicking in debug
/// builds), matching the original behavior.
#[allow(non_snake_case)] // name fixed by the kata statement
fn largestNumber(n: i32) -> i32 {
    if n == 0 {
        return n;
    }
    let mut out = 9;
    // Append one more trailing 9 per remaining digit. Using `_` fixes the
    // unused-variable warning the original named loop index produced.
    for _ in 1..n {
        out = out * 10 + 9;
    }
    out
}
| true |
62aa2389e4b844e79b836993fb75f28eab00910d
|
Rust
|
mateuszptr/rust_dhcp
|
/src/main.rs
|
UTF-8
| 3,138 | 2.515625 | 3 |
[] |
no_license
|
#![feature(int_to_from_bytes)]
extern crate byteorder;
extern crate bytes;
extern crate serde;
extern crate serde_json;
#[macro_use] extern crate serde_derive;
extern crate actix;
#[macro_use] extern crate actix_derive;
extern crate libc;
extern crate hwaddr;
mod dhcp_frames;
mod dhcp_options;
mod config;
mod server_actor;
mod io_actor;
use std::thread;
use std::fs::File;
use std::io::prelude::*;
use config::*;
use std::net::{UdpSocket, SocketAddr, IpAddr, Ipv4Addr};
use actix::prelude::*;
use io_actor::OutputActor;
use server_actor::ServerActor;
use dhcp_frames::DHCPPacket;
use std::os::unix::io::AsRawFd;
use std::ffi::CString;
use libc::c_void;
/// Rust's standard library wraps some `setsockopt` calls, but it does not
/// expose all of their functionality.
/// A broadcast IP address carries no interface information, so the OS would
/// pick a default interface on its own.
/// We therefore bind the socket to a specific interface manually with the
/// SO_BINDTODEVICE option.
unsafe fn set_socket_device(socket: &UdpSocket, iface: &str) {
    let fd = socket.as_raw_fd();
    let lvl = libc::SOL_SOCKET;
    let name = libc::SO_BINDTODEVICE;
    // Option value is the NUL-terminated interface name.
    let val = CString::new(iface).unwrap();
    let pointer = val.as_ptr() as *const c_void;
    let len = val.as_bytes_with_nul().len();
    // NOTE(review): the setsockopt return value is ignored, so a failed bind
    // (missing privileges, bad interface name) goes unnoticed — TODO confirm.
    libc::setsockopt(
        fd,
        lvl,
        name,
        pointer,
        len as libc::socklen_t
    );
}
/// Entry point: loads the JSON config, binds the DHCP server socket,
/// starts the actors, and spawns the receive loop.
fn main() {
    let system = actix::System::new("dhcp");
    // Open the JSON configuration file and load it into a Config struct.
    let mut config_file = File::open("Config.json").expect("Couldn't open config file");
    let mut config_content = String::new();
    config_file.read_to_string(&mut config_content).expect("Couldn't read config file");
    let config = get_config(config_content);
    // Create a socket bound to 0.0.0.0:67 (the standard DHCP server port),
    // attached to the configured interface, with broadcast enabled.
    let socket = UdpSocket::bind(SocketAddr::new(IpAddr::from(Ipv4Addr::from([0,0,0,0])), 67)).expect("Couldn't bind a socket");
    unsafe { set_socket_device(&socket, config.interface.as_str()); }
    socket.set_broadcast(true).expect("Couldn't set socket to bcast");
    let input_socket = socket.try_clone().expect("Couldn't clone the socket");
    // Actor responsible for sending messages out on the socket.
    let output_actor: Addr<Syn, _> = OutputActor::new(socket).start();
    // Actor implementing the DHCP server logic.
    let server_actor: Addr<Syn, _> = ServerActor::new(config, output_actor.clone()).start();
    // Background thread that receives packets (recv_from) and forwards
    // them to the server actor.
    let _input_thread_handle = thread::spawn(move || {
        loop {
            println!("Creating buffer");
            let mut buf = vec![0u8; 1024];
            let (_, addr) = input_socket.recv_from(&mut buf).unwrap();
            println!("Received frame from {}", addr);
            let packet = DHCPPacket::from_vec(buf).unwrap();
            server_actor.do_send(packet);
        }
    });
    // Start the actor system.
    system.run();
}
| true |
dbe93392a7856f3749ee596cdc81e6a796745de4
|
Rust
|
prisma/prisma-engines
|
/query-engine/prisma-models/src/selection_result.rs
|
UTF-8
| 5,231 | 3.203125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::{DomainError, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField};
use itertools::Itertools;
use std::convert::TryFrom;
/// Represents a set of results.
///
/// Each pair maps a selected field to the concrete value recorded for it.
#[derive(Default, Clone, PartialEq, Eq, Hash)]
pub struct SelectionResult {
    pub pairs: Vec<(SelectedField, PrismaValue)>,
}
impl std::fmt::Debug for SelectionResult {
    // Renders as a list of ("field display name", value) tuples for readability.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list()
            .entries(
                &self
                    .pairs
                    .iter()
                    .map(|pair| (format!("{}", pair.0), pair.1.clone()))
                    .collect_vec(),
            )
            .finish()
    }
}
impl SelectionResult {
    /// Builds a result set from (field, value) pairs; `T` is anything
    /// convertible into a `SelectedField`.
    pub fn new<T>(pairs: Vec<(T, PrismaValue)>) -> Self
    where
        T: Into<SelectedField>,
    {
        Self {
            pairs: pairs.into_iter().map(|(rt, value)| (rt.into(), value)).collect(),
        }
    }

    /// Appends one (field, value) pair.
    pub fn add<T>(&mut self, pair: (T, PrismaValue))
    where
        T: Into<SelectedField>,
    {
        self.pairs.push((pair.0.into(), pair.1));
    }

    /// Returns the value recorded for `selection`, if any.
    pub fn get(&self, selection: &SelectedField) -> Option<&PrismaValue> {
        self.pairs.iter().find_map(|(result_selection, value)| {
            if selection == result_selection {
                Some(value)
            } else {
                None
            }
        })
    }

    /// Iterates over cloned values in insertion order.
    pub fn values(&self) -> impl Iterator<Item = PrismaValue> + '_ {
        self.pairs.iter().map(|p| p.1.clone())
    }

    pub fn len(&self) -> usize {
        self.pairs.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Iterates over the database names of the selected fields.
    pub fn db_names(&self) -> impl Iterator<Item = &str> + '_ {
        self.pairs.iter().map(|(field, _)| field.db_name())
    }

    /// Consumes this `SelectionResult` and splits it into a set of `SelectionResult`s based on the passed
    /// `FieldSelection`s. Assumes that the transformation can be done.
    pub fn split_into(self, field_selections: &[FieldSelection]) -> Vec<SelectionResult> {
        field_selections
            .iter()
            .map(|field_selection| {
                let pairs: Vec<_> = field_selection
                    .selections()
                    .map(|selected_field| {
                        self.get(selected_field)
                            .map(|value| (selected_field.clone(), value.clone()))
                            .expect("Error splitting `ReturnValues`: `FieldSelection` doesn't match.")
                    })
                    .collect();
                SelectionResult::new(pairs)
            })
            .collect()
    }

    /// Checks if `self` only contains scalar field selections and if so, returns them all in a list.
    /// If any other selection is contained, returns `None`.
    pub fn as_scalar_fields(&self) -> Option<Vec<ScalarFieldRef>> {
        let scalar_fields: Vec<_> = self
            .pairs
            .iter()
            .filter_map(|(selection, _)| match selection {
                SelectedField::Scalar(sf) => Some(sf.clone()),
                SelectedField::Composite(_) => None,
            })
            .collect();
        // Lengths differ exactly when a composite selection was filtered out.
        if scalar_fields.len() == self.pairs.len() {
            Some(scalar_fields)
        } else {
            None
        }
    }

    /// Coerces contained values to best fit their type.
    /// - Scalar fields coerce values based on the TypeIdentifier.
    /// - Composite fields must be objects and contained fields must also follow the type coherence.
    pub fn coerce_values(self) -> crate::Result<Self> {
        let pairs = self
            .pairs
            .into_iter()
            .map(|(selection, value)| {
                let value = selection.coerce_value(value)?;
                Ok((selection, value))
            })
            .collect::<crate::Result<Vec<_>>>()?;
        Ok(Self { pairs })
    }
}
impl TryFrom<SelectionResult> for PrismaValue {
    type Error = DomainError;

    /// Extracts the first recorded value; fails on an empty result set.
    fn try_from(return_values: SelectionResult) -> crate::Result<Self> {
        return_values
            .pairs
            .into_iter()
            .next()
            .map(|(_, value)| value)
            .ok_or_else(|| {
                DomainError::ConversionFailure("ReturnValues".into(), "PrismaValue".into())
            })
    }
}
impl IntoIterator for SelectionResult {
    type Item = (SelectedField, PrismaValue);
    type IntoIter = std::vec::IntoIter<Self::Item>;

    // Consumes the result set, yielding owned (field, value) pairs.
    fn into_iter(self) -> Self::IntoIter {
        self.pairs.into_iter()
    }
}
// A single (field, value) pair becomes a one-entry result set.
impl<T> From<(T, PrismaValue)> for SelectionResult
where
    T: Into<SelectedField>,
{
    fn from((x, value): (T, PrismaValue)) -> Self {
        Self::new(vec![(x.into(), value)])
    }
}
// A vector of (field, value) pairs becomes a result set, order preserved.
impl<T> From<Vec<(T, PrismaValue)>> for SelectionResult
where
    T: Into<SelectedField>,
{
    fn from(tuples: Vec<(T, PrismaValue)>) -> Self {
        Self::new(tuples.into_iter().map(|(x, value)| (x.into(), value)).collect())
    }
}
// Builds a result set covering all of `fs`'s selections, with every value
// initialized to Null.
impl From<&FieldSelection> for SelectionResult {
    fn from(fs: &FieldSelection) -> Self {
        Self {
            pairs: fs
                .selections()
                .map(|selection| (selection.clone(), PrismaValue::Null))
                .collect(),
        }
    }
}
| true |
e399c03576c227ce4347ada70b8fb4314f443680
|
Rust
|
wa7sa34cx/WhyDoYou-bot
|
/src/utils/locale.rs
|
UTF-8
| 4,945 | 3.046875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Localization helper
use crate::models::error::HandlerError;
use include_dir::{include_dir, Dir, File};
use lazy_static::lazy_static;
use log::{error, info, warn};
use regex::{Captures, Regex};
use std::collections::HashMap;
lazy_static! {
    /// Shared instance of Locale
    pub static ref TEXTS: Locale = Locale::parse().unwrap();
    // Matches one `"key" = "value";` row of a .locale file.
    static ref ROW_REGEX: Regex = regex::Regex::new("\"([\\w]+)\" = \"([^\"]+)\";").unwrap();
}

// All bundled .locale files, embedded into the binary at compile time.
const LOCALE_DIR: Dir = include_dir!("assets/locale");
/// All loaded locales, one entry per embedded .locale file.
#[derive(Debug)]
pub struct Locale {
    locales: Vec<LocaleFileMeta>,
}
impl Locale {
fn parse() -> Result<Self, HandlerError> {
let mut locales: Vec<LocaleFileMeta> = Vec::new();
for file in LOCALE_DIR.files() {
if let Some(meta) = LocaleFileMeta::from(file) {
locales.push(meta);
}
}
let item = Self { locales };
item._test_keys()?;
Ok(item)
}
/// Get localized string
///
/// Parameters:
/// - key: localization key
/// - lang: language code (ex.: en, ru)
///
/// Return: localized string or key
pub fn get(&self, key: &str, lang: &str) -> String {
let result = &self
.locales
.iter()
.find(|l| l.lang.to_lowercase() == lang.to_lowercase())
.or_else(|| *{ &self.locales.iter().find(|l| l.is_base) })
.and_then(|l| l.data.get(key).to_owned().and_then(|s| Some(s.as_str())))
.unwrap_or(key);
return result.to_string();
}
fn _test_keys(&self) -> Result<(), HandlerError> {
let mut is_error = false;
for locale in &self.locales {
&self.locales.iter().for_each(|l| {
if l.lang == locale.lang {
return;
};
locale.data.iter().for_each(|a| {
if !l.data.contains_key(a.0) {
if l.is_base {
is_error = true;
error!(
"{} lang not contain '{}' key which is in {} lang",
l.title(),
a.0,
locale.title()
);
} else {
warn!(
"{} lang not contain '{}' key which is in {} lang",
l.title(),
a.0,
locale.title()
);
}
}
})
});
}
if is_error {
Err(HandlerError::from_str("Locales has errors."))
} else {
Ok(())
}
}
}
/// One parsed .locale file: its language code, whether it is the base
/// (fallback) locale, and its key -> text table.
#[derive(Debug)]
struct LocaleFileMeta {
    lang: String,
    is_base: bool,
    data: HashMap<String, String>,
}
impl LocaleFileMeta {
    /// Parses one embedded file into locale metadata.
    ///
    /// Accepted file names are `<lang>.locale` or `<lang>.base.locale`;
    /// anything else yields `None`. The body consists of `"key" = "value";`
    /// rows; lines starting with `//` are comments.
    fn from(file: &File) -> Option<Self> {
        if let Some(os_str_name) = file.path().file_name() {
            if let Some(raw_str) = os_str_name.to_str() {
                let components = raw_str.split(".").collect::<Vec<&str>>();
                if components.len() > 3 || components.len() < 2 {
                    return None;
                }
                if components.last().unwrap().to_lowercase() != String::from("locale") {
                    return None;
                }
                if let Some(content) = file.contents_utf8() {
                    // Drop comment lines and empty lines before matching rows.
                    // (The original used `||`, which kept every line and let
                    // commented-out rows be parsed as real entries.)
                    let f_content = content
                        .split("\n")
                        .filter(|row| !row.starts_with("//") && !row.is_empty())
                        .collect::<Vec<&str>>()
                        .join("\n");
                    let mut data: HashMap<String, String> = HashMap::new();
                    for row in ROW_REGEX
                        .captures_iter(&*f_content)
                        .collect::<Vec<Captures>>()
                    {
                        let key = row.get(1).unwrap().as_str().parse().unwrap();
                        let value = row.get(2).unwrap().as_str().parse().unwrap();
                        data.insert(key, value);
                    }
                    let item = LocaleFileMeta {
                        lang: components.get(0).unwrap().parse().unwrap(),
                        is_base: components.len() == 3
                            && components.get(1).unwrap().to_lowercase() == String::from("base"),
                        data,
                    };
                    info!("{} language found & loaded.", item.title());
                    return Some(item);
                }
            }
        }
        None
    }

    /// Display name for logs: the upper-cased language code, with a trailing
    /// `*` marking the base locale.
    fn title(&self) -> String {
        let mut result = String::from(&self.lang.clone());
        if self.is_base {
            result.push('*');
        }
        result.to_string().to_uppercase()
    }
}
| true |
6dd8177bfc0f4b5bf78e749af2e13ba80cebe258
|
Rust
|
russelltg/srt-rs
|
/srt-protocol/src/packet/msg_number.rs
|
UTF-8
| 229 | 2.625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::modular_num::modular_num;
// Declares MsgNumber via the project's modular_num macro: a sequence number
// stored in a u32, using 26 bits.
modular_num! {
    pub MsgNumber(u32, 26)
}
impl MsgNumber {
    /// Post-increment: advances `self` by one and returns the value it held
    /// *before* the increment.
    #[must_use]
    pub fn increment(&mut self) -> Self {
        let result = *self;
        *self += 1;
        result
    }
}
| true |
bbca1674e59b8a03bc3683d625ef4f58f01db247
|
Rust
|
ajm188/advent_of_code
|
/2015/day01/main.rs
|
UTF-8
| 1,158 | 3.359375 | 3 |
[
"MIT"
] |
permissive
|
use std::env::args;
/// Running state while walking the parenthesis instructions.
struct Santa {
    position: i32,
    first_time_in_basement: i32,
    num_movements: i32,
}

impl Santa {
    /// Whether a first basement visit has already been recorded
    /// (-1 means "never been below floor 0").
    fn has_been_in_basement(&self) -> bool {
        self.first_time_in_basement >= 0
    }

    /// Applies one movement (+1 or -1 floor) and returns the updated state,
    /// stamping the step index of the first dip below floor 0.
    fn from_santa(santa: Santa, movement: i32) -> Santa {
        let position = santa.position + movement;
        let num_movements = santa.num_movements + 1;
        // De Morgan of the original guard: only the first step that lands
        // below 0 records the counter; otherwise the old stamp is kept.
        let first_time_in_basement = if !santa.has_been_in_basement() && position < 0 {
            num_movements
        } else {
            santa.first_time_in_basement
        };
        Santa {
            position,
            first_time_in_basement,
            num_movements,
        }
    }
}
/// Day 1: '(' goes up a floor, anything else goes down. Prints the final
/// floor and the 1-based index of the first step entering the basement.
fn main() {
    // Instructions come from the first CLI argument; default to empty input.
    let instructions = match args().nth(1) {
        Some(v) => v,
        None => "".to_string(),
    };
    let santa = Santa {
        position: 0,
        first_time_in_basement: -1,
        num_movements: 0,
    };
    // Map each char to a floor delta, then fold over the steps.
    let last_santa: Santa = instructions
        .chars()
        .map(|c: char| if c == '(' { 1 } else { -1 })
        .fold(santa, |s, i| Santa::from_santa(s, i));
    println!("{} {}", last_santa.position, last_santa.first_time_in_basement);
}
| true |
16e7f44ffefec6125de2b0621a53a7dd1e33864a
|
Rust
|
flanfly/rust-pottcpp
|
/numortxt.rs
|
UTF-8
| 480 | 3.953125 | 4 |
[
"MIT"
] |
permissive
|
/// A value that is either an integer or a text string.
enum NumberOrText {
    Number(i32),
    Text(String)
}
/// Prints the value on its own line with a "Number:" or "Text:" prefix,
/// depending on the variant.
fn print_number_or_text(nt: NumberOrText) {
    let line = match nt {
        NumberOrText::Number(i) => format!("Number: {}", i),
        NumberOrText::Text(t) => format!("Text: {}", t),
    };
    println!("{}", line);
}
/// Demo: construct both variants and print them.
fn main() {
    let a: NumberOrText = NumberOrText::Number(42);
    let b: NumberOrText = NumberOrText::Text("Hello, World".to_string());
    // Prints "Number: 42"
    print_number_or_text(a);
    // Prints "Text: Hello, World"
    print_number_or_text(b);
}
| true |
ef3cdeae12f65e6bfe91cbba96412998279f48d7
|
Rust
|
AurelienAubry/lc3-vm
|
/src/instructions/not.rs
|
UTF-8
| 2,267 | 3.5 | 4 |
[] |
no_license
|
use crate::bus::Bus;
use crate::cpu::{register_from_u16, Register, Registers};
use crate::instructions::Instruction;
use anyhow::Result;
/// The LC-3 NOT instruction: bitwise complement of a source register stored
/// into a destination register.
pub struct Not {
    dst_reg: Register,
    src_reg: Register,
}
impl Not {
    /// Decodes the operands from the 16-bit instruction word:
    /// bits 11..9 select the destination register, bits 8..6 the source.
    pub fn new(instruction: u16) -> Result<Self> {
        let dst_reg = register_from_u16(instruction >> 9 & 0x7)?;
        let src_reg = register_from_u16(instruction >> 6 & 0x7)?;
        Ok(Self { dst_reg, src_reg })
    }
}
impl Instruction for Not {
    /// Writes `!src` into `dst`, then updates the condition flags from `dst`.
    fn run(&self, registers: &mut Registers, _bus: &mut Bus) -> Result<()> {
        registers.write_register(self.dst_reg, !registers.read_register(self.src_reg));
        registers.update_flags(self.dst_reg);
        Ok(())
    }

    /// Human-readable disassembly, e.g. "NOT R1, R0".
    fn to_str(&self) -> String {
        format!("NOT {:?}, {:?}", self.dst_reg, self.src_reg,)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bus::Bus;
    use crate::cpu::Flag;
    use crate::instructions::decode;

    // 0b1001_001_000_1_11111 decodes as NOT R1, R0; each case checks the
    // complemented result and the condition flag it should set.
    #[test]
    fn test_run() {
        let mut reg = Registers::new();
        let mut bus = Bus::new().unwrap();
        // ZRO FLAG
        reg.write_register(Register::R0, 0b1111_1111_1111_1111);
        let instruction = decode(0b1001_001_000_1_11111).unwrap();
        instruction.run(&mut reg, &mut bus).unwrap();
        assert_eq!(reg.read_register(Register::R1), 0);
        assert_eq!(reg.read_register(Register::COND), Flag::Zro as u16);
        // POS FLAG
        reg.write_register(Register::R0, 0b1000_1111_1111_1111);
        let instruction = decode(0b1001_001_000_1_11111).unwrap();
        instruction.run(&mut reg, &mut bus).unwrap();
        assert_eq!(reg.read_register(Register::R1), 0b0111_0000_0000_0000);
        assert_eq!(reg.read_register(Register::COND), Flag::Pos as u16);
        // NEG FLAG
        reg.write_register(Register::R0, 0b0111_1010_1010_1010);
        let instruction = decode(0b1001_001_000_1_11111).unwrap();
        instruction.run(&mut reg, &mut bus).unwrap();
        assert_eq!(reg.read_register(Register::R1), 0b1000_0101_0101_0101);
        assert_eq!(reg.read_register(Register::COND), Flag::Neg as u16);
    }

    #[test]
    fn test_to_str() {
        let inst = decode(0b1001_001_000_1_11111).unwrap();
        assert_eq!(inst.to_str(), "NOT R1, R0");
    }
}
| true |
6ae9a645c26ac86e6b447a7bedc95240071b3be4
|
Rust
|
rjloura/proxy-rs
|
/src/utils.rs
|
UTF-8
| 1,207 | 3.3125 | 3 |
[
"MIT"
] |
permissive
|
use chrono::prelude::*;
// Binary unit size and the suffixes for successive powers of 1024.
const DEFAULT_UNIT: f64 = 1024_f64;
const SUFFIX: &[&str] = &["k", "M", "G", "T", "P", "E"];

/// Takes the number of bytes and converts it to a human readable string
/// using binary (1024-based) units.
pub fn pretty_bytes(b: u64) -> String {
    let bytes = b as f64;
    if bytes < DEFAULT_UNIT {
        format!("{:.0} B", bytes)
    } else {
        // Which power of 1024 the value falls into (>= 1 here).
        let exponent = (bytes.log10() / DEFAULT_UNIT.log10()) as usize;
        let scaled = bytes / DEFAULT_UNIT.powi(exponent as i32);
        format!("{:.1} {}B", scaled, SUFFIX[exponent - 1])
    }
}
/// Prints `message` to stdout prefixed with the local "Y-m-d H:M:S" timestamp.
pub fn log<S: AsRef<str>>(message: S) {
    let dt: DateTime<Local> = Local::now();
    println!(
        "{} {}",
        dt.format("%Y-%m-%d %H:%M:%S").to_string(),
        message.as_ref()
    );
}
#[cfg(test)]
mod tests {
    use super::*;

    // One expected rendering per unit, from bytes up through exabytes.
    #[test]
    fn pretty_bytes_test() {
        assert_eq!("1 B", pretty_bytes(1));
        assert_eq!("1.0 kB", pretty_bytes(1024));
        assert_eq!("1.0 MB", pretty_bytes(1024u64.pow(2)));
        assert_eq!("1.0 GB", pretty_bytes(1024u64.pow(3)));
        assert_eq!("1.0 TB", pretty_bytes(1024u64.pow(4)));
        assert_eq!("1.0 PB", pretty_bytes(1024u64.pow(5)));
        assert_eq!("1.0 EB", pretty_bytes(1024u64.pow(6)));
    }
}
| true |
d796f16109e37e30ecb4c6dd6bc3dbc71133b2d3
|
Rust
|
BurntSushi/rust-sorts
|
/src/lib.rs
|
UTF-8
| 9,148 | 2.90625 | 3 |
[
"Unlicense"
] |
permissive
|
#![crate_id = "sorts#0.1.0"]
#![crate_type = "lib"]
#![license = "UNLICENSE"]
#![doc(html_root_url = "http://burntsushi.net/rustdoc/rust-sorts")]
#![feature(phase)]
#![feature(macro_rules)]
//! A collection of sorting algorithms with tests and benchmarks.
#[phase(syntax, link)] extern crate log;
extern crate stdtest = "test";
extern crate quickcheck;
extern crate rand;
use rand::Rng; // why do I need this?
#[cfg(test)]
mod bench;
#[cfg(test)]
mod test;
// Below this length the hybrid variants switch to insertion sort.
pub static INSERTION_THRESHOLD: uint = 16;

/// The `bogo` sort is the simplest but worst sorting algorithm. It shuffles
/// the given input until it is sorted. Its worst case space complexity is
/// `O(n)` but its time complexity is *unbounded*.
pub fn bogo<T: TotalOrd>(xs: &mut [T]) {
    // Sorted iff every adjacent pair is in order.
    fn is_sorted<T: TotalOrd>(xs: &[T]) -> bool {
        for win in xs.windows(2) {
            if win[0] > win[1] {
                return false
            }
        }
        true
    }
    // Keep reshuffling until a sorted permutation happens to appear.
    let rng = &mut rand::task_rng();
    while !is_sorted(xs) {
        rng.shuffle_mut(xs);
    }
}
/// Classic in place insertion sort. Worst case time complexity is `O(n^2)`.
pub fn insertion<T: TotalOrd>(xs: &mut [T]) {
    let (mut i, len) = (1, xs.len());
    while i < len {
        // Swap xs[i] leftwards until it rests above a smaller-or-equal element.
        let mut j = i;
        while j > 0 && xs[j-1] > xs[j] {
            xs.swap(j, j-1);
            j = j - 1;
        }
        i = i + 1;
    }
}
/// Classic in place bubble sort. Worst case time complexity is `O(n^2)`.
pub fn bubble<T: TotalOrd>(xs: &mut [T]) {
    let mut n = xs.len();
    while n > 0 {
        // `newn` records the last swap position: everything beyond it is
        // already sorted, so the next pass stops there.
        let mut newn = 0;
        let mut i = 1;
        while i < n {
            if xs[i-1] > xs[i] {
                xs.swap(i-1, i);
                newn = i;
            }
            i = i + 1;
        }
        n = newn;
    }
}
/// Classic in place selection sort. Worst case time complexity is `O(n^2)`.
/// Note that this is an *unstable* implementation.
pub fn selection<T: TotalOrd>(xs: &mut [T]) {
    let (mut i, len) = (0, xs.len());
    while i < len {
        // Find the minimum of the unsorted suffix and move it to position i.
        let (mut j, mut cur_min) = (i + 1, i);
        while j < len {
            if xs[j] < xs[cur_min] {
                cur_min = j;
            }
            j = j + 1;
        }
        xs.swap(i, cur_min);
        i = i + 1;
    }
}
pub mod quick {
    use super::INSERTION_THRESHOLD;

    /// Standard in-place quicksort that always uses the first element as
    /// a pivot. Average time complexity is `O(nlogn)` and its space complexity
    /// is `O(1)` (limited to vectors of size `N`, which is the maximum number
    /// expressible with a `uint`).
    pub fn dumb<T: TotalOrd>(xs: &mut [T]) {
        fn pivot<T: TotalOrd>(_: &[T]) -> uint { 0 }
        qsort(xs, pivot)
    }

    /// Standard in-place quicksort that uses the median of the first, middle
    /// and last elements in each vector for the pivot.
    /// Average time complexity is `O(nlogn)` and its space complexity
    /// is `O(1)` (limited to vectors of size `N`, which is the maximum number
    /// expressible with a `uint`).
    ///
    /// This seems to have the same performance characteristics as the `dumb`
    /// quicksort, except when the input is almost sorted where intelligently
    /// choosing a pivot helps by at least an order of magnitude. (This is
    /// because an almost-sorted vector given to the `dumb` quicksort provokes
    /// worse case `O(n^2)` performance, whereas picking a pivot intelligently
    /// helps keep it closer to the average `O(nlogn)` performance.)
    pub fn smart<T: TotalOrd>(xs: &mut [T]) {
        qsort(xs, smart_pivot)
    }

    /// Hybrid quicksort: partitions recursively, but hands partitions of at
    /// most INSERTION_THRESHOLD elements to insertion sort.
    pub fn insertion<T: TotalOrd>(xs: &mut [T]) {
        if xs.len() <= 1 {
            return
        }
        let p = smart_pivot(xs);
        let p = partition(xs, p);
        if p <= INSERTION_THRESHOLD {
            super::insertion(xs.mut_slice_to(p))
        } else {
            qsort(xs.mut_slice_to(p), smart_pivot);
        }
        if xs.len() - p+1 <= INSERTION_THRESHOLD {
            super::insertion(xs.mut_slice_from(p+1))
        } else {
            qsort(xs.mut_slice_from(p+1), smart_pivot);
        }
    }

    // Recursive driver: partition around the chosen pivot, then sort both
    // halves (the element at index p is already in its final position).
    fn qsort<T: TotalOrd>(xs: &mut [T], pivot: fn(&[T]) -> uint) {
        if xs.len() <= 1 {
            return
        }
        let p = pivot(xs);
        let p = partition(xs, p);
        qsort(xs.mut_slice_to(p), pivot);
        qsort(xs.mut_slice_from(p+1), pivot);
    }

    // Lomuto-style partition: parks the pivot at the end, sweeps elements
    // <= pivot to the front, then swaps the pivot into its final slot and
    // returns that index.
    fn partition<T: TotalOrd>(xs: &mut [T], p: uint) -> uint {
        if xs.len() <= 1 {
            return p
        }
        let lasti = xs.len() - 1;
        let (mut i, mut nextp) = (0, 0);
        xs.swap(lasti, p);
        while i < lasti {
            if xs[i] <= xs[lasti] {
                xs.swap(i, nextp);
                nextp = nextp + 1;
            }
            i = i + 1;
        }
        xs.swap(nextp, lasti);
        nextp
    }

    // Median-of-three pivot choice over the first, middle and last elements.
    fn smart_pivot<T: TotalOrd>(xs: &[T]) -> uint {
        let (l, r) = (0, xs.len() - 1);
        let m = l + ((r - l) / 2);
        let (left, middle, right) = (&xs[l], &xs[m], &xs[r]);
        if middle >= left && middle <= right {
            m
        } else if left >= middle && left <= right {
            l
        } else {
            r
        }
    }
}
pub mod heap {
    /// Heapsort that builds the initial max-heap by sifting elements up.
    pub fn up<T: TotalOrd>(xs: &mut [T]) {
        sort(xs, heapify_up);
    }

    /// Heapsort that builds the initial max-heap by sifting elements down.
    pub fn down<T: TotalOrd>(xs: &mut [T]) {
        sort(xs, heapify_down);
    }

    // Common driver: heapify, then repeatedly move the maximum to the end of
    // the shrinking unsorted prefix and restore the heap property.
    fn sort<T: TotalOrd>(xs: &mut [T], heapify: fn(&mut [T])) {
        if xs.len() <= 1 {
            return
        }
        heapify(xs);
        let mut end = xs.len() - 1;
        while end > 0 {
            xs.swap(end, 0);
            end = end - 1;
            sift_down(xs, 0, end);
        }
    }

    // Bottom-up (Floyd) heap construction: sift each internal node down,
    // starting from the last parent.
    fn heapify_down<T: TotalOrd>(xs: &mut [T]) {
        let last = xs.len() - 1;
        let mut start = 1 + ((last - 1) / 2);
        while start > 0 {
            start = start - 1;
            sift_down(xs, start, last);
        }
    }

    // Restores the max-heap property for the subtree rooted at `start`,
    // considering indices up to `end` inclusive.
    fn sift_down<T: TotalOrd>(xs: &mut [T], start: uint, end: uint) {
        let mut root = start;
        while root * 2 + 1 <= end {
            let child = root * 2 + 1;
            let mut swap = root;
            if xs[swap] < xs[child] {
                swap = child
            }
            if child + 1 <= end && xs[swap] < xs[child+1] {
                swap = child + 1
            }
            if swap == root {
                return
            }
            xs.swap(root, swap);
            root = swap;
        }
    }

    // Top-down heap construction: grow the heap one element at a time,
    // sifting each new element up to its place.
    fn heapify_up<T: TotalOrd>(xs: &mut [T]) {
        let mut end = 1;
        while end < xs.len() {
            sift_up(xs, 0, end);
            end = end + 1;
        }
    }

    // Moves the element at `end` up toward `start` until its parent is at
    // least as large.
    fn sift_up<T: TotalOrd>(xs: &mut [T], start: uint, end: uint) {
        let mut child = end;
        while child > start {
            let parent = (child - 1) / 2;
            if xs[parent] >= xs[child] {
                return
            }
            xs.swap(parent, child);
            child = parent;
        }
    }
}
pub mod merge {
    use std::cmp::min;
    use std::slice::MutableCloneableVector;
    use super::INSERTION_THRESHOLD;

    /// A stable mergesort with worst case `O(nlogn)` performance. This
    /// particular implementation has `O(n)` complexity. Unfortunately, the
    /// constant factor is pretty high.
    ///
    /// (See Rust's standard library `sort` function for a better mergesort
    /// which uses unsafe, I think.)
    pub fn sort<T: TotalOrd + Clone>(xs: &mut [T]) {
        // Bottom-up merge: double the run width each pass, merging adjacent
        // runs from `xs` into `buf`, then copying the buffer back.
        let (len, mut width) = (xs.len(), 1);
        let mut buf = xs.to_owned();
        while width < len {
            let mut start = 0;
            while start < len {
                let mid = min(len, start + width);
                let end = min(len, start + 2 * width);
                merge(xs, buf, start, mid, end);
                start = start + 2 * width;
            }
            width = width * 2;
            xs.copy_from(buf);
        }
    }

    /// Hybrid mergesort: insertion-sorts INSERTION_THRESHOLD-sized chunks
    /// first, then merges bottom-up starting at that width.
    pub fn insertion<T: TotalOrd + Clone>(xs: &mut [T]) {
        let (len, mut width) = (xs.len(), INSERTION_THRESHOLD);
        let mut i = 0;
        while i < len {
            let upto = min(len, i + INSERTION_THRESHOLD);
            super::insertion(xs.mut_slice(i, upto));
            i = i + INSERTION_THRESHOLD;
        }
        let mut buf = xs.to_owned();
        while width < len {
            let mut start = 0;
            while start < len {
                let mid = min(len, start + width);
                let end = min(len, start + 2 * width);
                merge(xs, buf, start, mid, end);
                start = start + 2 * width;
            }
            width = width * 2;
            xs.copy_from(buf);
        }
    }

    // Merges the sorted runs xs[l..r] and xs[r..e] into buf[l..e], taking
    // from the left run on ties so the sort stays stable.
    fn merge<T: TotalOrd + Clone>
            (xs: &mut [T], buf: &mut [T], l: uint, r: uint, e: uint) {
        let (mut il, mut ir) = (l, r);
        let mut i = l;
        while i < e {
            if il < r && (ir >= e || xs[il] <= xs[ir]) {
                buf[i] = xs[il].clone();
                il = il + 1;
            } else {
                buf[i] = xs[ir].clone();
                ir = ir + 1;
            }
            i = i + 1;
        }
    }
}
| true |
69824be9d4a6657364d2f9835d88edf36306243b
|
Rust
|
dalance/nom-greedyerror
|
/examples/nom7.rs
|
UTF-8
| 1,122 | 2.8125 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use nom7::branch::alt;
use nom7::character::complete::{alpha1, digit1};
use nom7::error::{ErrorKind, ParseError, VerboseError};
use nom7::sequence::tuple;
use nom7::Err::Error;
use nom7::IResult;
use nom_greedyerror::{error_position, GreedyError, Position};
use nom_locate4::LocatedSpan;
type Span<'a> = LocatedSpan<&'a str>;
fn parser<'a, E: ParseError<Span<'a>>>(
input: Span<'a>,
) -> IResult<Span<'a>, (Span<'a>, Span<'a>, Span<'a>), E> {
alt((
tuple((alpha1, digit1, alpha1)),
tuple((digit1, alpha1, digit1)),
))(input)
}
fn main() {
// VerboseError failed at
// abc012:::
// ^
let error = parser::<VerboseError<Span>>(Span::new("abc012:::"));
dbg!(&error);
match error {
Err(Error(e)) => assert_eq!(e.errors.first().map(|x| x.0.position()), Some(0)),
_ => (),
};
// GreedyError failed at
// abc012:::
// ^
let error = parser::<GreedyError<Span, ErrorKind>>(Span::new("abc012:::"));
dbg!(&error);
match error {
Err(Error(e)) => assert_eq!(error_position(&e), Some(6)),
_ => (),
};
}
| true |
a96a37f9547b1bf69e973f22f148f22517062db9
|
Rust
|
Tyler-Zhang/words-with-coworkers
|
/words-game/src/error.rs
|
UTF-8
| 1,025 | 3.1875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::error;
use std::fmt;
#[derive(Debug)]
pub enum Error {
BadAction(String),
NotEnoughTiles,
InvalidWord(String),
StartingTileNotCovered,
WordDoesNotIntersect,
NoLettersUsed,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::BadAction(ref err) => write!(f, "Bad Action error: {}", err),
Error::NotEnoughTiles => write!(f, "Not enough tiles"),
Error::InvalidWord(ref word) => write!(f, "Word <{}> not in the dictionary", word),
Error::StartingTileNotCovered => write!(f, "Starting tile needs to be covered"),
Error::WordDoesNotIntersect => write!(f, "Word does not intersect with another word"),
Error::NoLettersUsed => write!(f, "You must use at least one letter"),
}
}
}
impl error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> {
Some(self)
}
}
pub type Result<T> = std::result::Result<T, Box<Error>>;
| true |
f991c37a6fb8194d58418eae04de737454fc9698
|
Rust
|
tiredhaydn/project_euler
|
/src/bin/problem009/main.rs
|
UTF-8
| 343 | 2.953125 | 3 |
[] |
no_license
|
fn main() {
let mut answer = 0;
for a in 3..=998 {
for b in 4..=997 {
let c = 1000 - a - b;
if a * a + b * b == c * c {
let abc = a * b * c;
if abc > answer {
answer = abc;
}
}
}
}
println!("{}", answer);
}
| true |
132a4d55730bbebc5b9c22282d60101b60f86f0c
|
Rust
|
liang610/flux
|
/libflux/src/flux/semantic/convert.rs
|
UTF-8
| 118,059 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use crate::ast;
use crate::semantic::fresh::Fresher;
use crate::semantic::nodes::*;
use crate::semantic::types::MonoType;
use std::result;
pub type SemanticError = String;
pub type Result<T> = result::Result<T, SemanticError>;
/// convert_with converts an AST package node to its semantic representation using
/// the provided fresher.
///
/// Note: most external callers of this function will want to use the analyze()
/// function in the libstd crate instead, which is aware of everything in the Flux stdlib and prelude.
///
/// The function explicitly moves the ast::Package because it adds information to it.
/// We follow here the principle that every compilation step should be isolated and should add meaning
/// to the previous one. In other terms, once one converts an AST he should not use it anymore.
/// If one wants to do so, he should explicitly pkg.clone() and incur consciously in the memory
/// overhead involved.
pub fn convert_with(pkg: ast::Package, fresher: &mut Fresher) -> Result<Package> {
convert_package(pkg, fresher)
// TODO(affo): run checks on the semantic graph.
}
fn convert_package(pkg: ast::Package, fresher: &mut Fresher) -> Result<Package> {
let files = pkg
.files
.into_iter()
.map(|f| convert_file(f, fresher))
.collect::<Result<Vec<File>>>()?;
Ok(Package {
loc: pkg.base.location,
package: pkg.package,
files,
})
}
pub fn convert_file(file: ast::File, fresher: &mut Fresher) -> Result<File> {
let package = convert_package_clause(file.package, fresher)?;
let imports = file
.imports
.into_iter()
.map(|i| convert_import_declaration(i, fresher))
.collect::<Result<Vec<ImportDeclaration>>>()?;
let body = file
.body
.into_iter()
.map(|s| convert_statement(s, fresher))
.collect::<Result<Vec<Statement>>>()?;
Ok(File {
loc: file.base.location,
package,
imports,
body,
})
}
fn convert_package_clause(
pkg: Option<ast::PackageClause>,
fresher: &mut Fresher,
) -> Result<Option<PackageClause>> {
if pkg.is_none() {
return Ok(None);
}
let pkg = pkg.unwrap();
let name = convert_identifier(pkg.name, fresher)?;
Ok(Some(PackageClause {
loc: pkg.base.location,
name,
}))
}
fn convert_import_declaration(
imp: ast::ImportDeclaration,
fresher: &mut Fresher,
) -> Result<ImportDeclaration> {
let alias = match imp.alias {
None => None,
Some(id) => Some(convert_identifier(id, fresher)?),
};
let path = convert_string_literal(imp.path, fresher)?;
Ok(ImportDeclaration {
loc: imp.base.location,
alias,
path,
})
}
fn convert_statement(stmt: ast::Statement, fresher: &mut Fresher) -> Result<Statement> {
match stmt {
ast::Statement::Option(s) => Ok(Statement::Option(Box::new(convert_option_statement(
*s, fresher,
)?))),
ast::Statement::Builtin(s) => {
Ok(Statement::Builtin(convert_builtin_statement(*s, fresher)?))
}
ast::Statement::Test(s) => Ok(Statement::Test(Box::new(convert_test_statement(
*s, fresher,
)?))),
ast::Statement::Expr(s) => Ok(Statement::Expr(convert_expression_statement(*s, fresher)?)),
ast::Statement::Return(s) => Ok(Statement::Return(convert_return_statement(*s, fresher)?)),
// TODO(affo): we should fix this to include MemberAssignement.
// The error lies in AST: the Statement enum does not include that.
// This is not a problem when parsing, because we parse it only in the option assignment case,
// and we return an OptionStmt, which is a Statement.
ast::Statement::Variable(s) => Ok(Statement::Variable(Box::new(
convert_variable_assignment(*s, fresher)?,
))),
ast::Statement::Bad(_) => {
Err("BadStatement is not supported in semantic analysis".to_string())
}
}
}
fn convert_assignment(assign: ast::Assignment, fresher: &mut Fresher) -> Result<Assignment> {
match assign {
ast::Assignment::Variable(a) => Ok(Assignment::Variable(convert_variable_assignment(
*a, fresher,
)?)),
ast::Assignment::Member(a) => {
Ok(Assignment::Member(convert_member_assignment(*a, fresher)?))
}
}
}
fn convert_option_statement(stmt: ast::OptionStmt, fresher: &mut Fresher) -> Result<OptionStmt> {
Ok(OptionStmt {
loc: stmt.base.location,
assignment: convert_assignment(stmt.assignment, fresher)?,
})
}
fn convert_builtin_statement(stmt: ast::BuiltinStmt, fresher: &mut Fresher) -> Result<BuiltinStmt> {
Ok(BuiltinStmt {
loc: stmt.base.location,
id: convert_identifier(stmt.id, fresher)?,
})
}
fn convert_test_statement(stmt: ast::TestStmt, fresher: &mut Fresher) -> Result<TestStmt> {
Ok(TestStmt {
loc: stmt.base.location,
assignment: convert_variable_assignment(stmt.assignment, fresher)?,
})
}
fn convert_expression_statement(stmt: ast::ExprStmt, fresher: &mut Fresher) -> Result<ExprStmt> {
Ok(ExprStmt {
loc: stmt.base.location,
expression: convert_expression(stmt.expression, fresher)?,
})
}
fn convert_return_statement(stmt: ast::ReturnStmt, fresher: &mut Fresher) -> Result<ReturnStmt> {
Ok(ReturnStmt {
loc: stmt.base.location,
argument: convert_expression(stmt.argument, fresher)?,
})
}
fn convert_variable_assignment(
stmt: ast::VariableAssgn,
fresher: &mut Fresher,
) -> Result<VariableAssgn> {
Ok(VariableAssgn::new(
convert_identifier(stmt.id, fresher)?,
convert_expression(stmt.init, fresher)?,
stmt.base.location,
))
}
fn convert_member_assignment(stmt: ast::MemberAssgn, fresher: &mut Fresher) -> Result<MemberAssgn> {
Ok(MemberAssgn {
loc: stmt.base.location,
member: convert_member_expression(stmt.member, fresher)?,
init: convert_expression(stmt.init, fresher)?,
})
}
fn convert_expression(expr: ast::Expression, fresher: &mut Fresher) -> Result<Expression> {
match expr {
ast::Expression::Function(expr) => Ok(Expression::Function(Box::new(convert_function_expression(*expr, fresher)?))),
ast::Expression::Call(expr) => Ok(Expression::Call(Box::new(convert_call_expression(*expr, fresher)?))),
ast::Expression::Member(expr) => Ok(Expression::Member(Box::new(convert_member_expression(*expr, fresher)?))),
ast::Expression::Index(expr) => Ok(Expression::Index(Box::new(convert_index_expression(*expr, fresher)?))),
ast::Expression::PipeExpr(expr) => Ok(Expression::Call(Box::new(convert_pipe_expression(*expr, fresher)?))),
ast::Expression::Binary(expr) => Ok(Expression::Binary(Box::new(convert_binary_expression(*expr, fresher)?))),
ast::Expression::Unary(expr) => Ok(Expression::Unary(Box::new(convert_unary_expression(*expr, fresher)?))),
ast::Expression::Logical(expr) => Ok(Expression::Logical(Box::new(convert_logical_expression(*expr, fresher)?))),
ast::Expression::Conditional(expr) => Ok(Expression::Conditional(Box::new(convert_conditional_expression(*expr, fresher)?))),
ast::Expression::Object(expr) => Ok(Expression::Object(Box::new(convert_object_expression(*expr, fresher)?))),
ast::Expression::Array(expr) => Ok(Expression::Array(Box::new(convert_array_expression(*expr, fresher)?))),
ast::Expression::Identifier(expr) => Ok(Expression::Identifier(convert_identifier_expression(expr, fresher)?)),
ast::Expression::StringExpr(expr) => Ok(Expression::StringExpr(Box::new(convert_string_expression(*expr, fresher)?))),
ast::Expression::Paren(expr) => convert_expression(expr.expression, fresher),
ast::Expression::StringLit(lit) => Ok(Expression::StringLit(convert_string_literal(lit, fresher)?)),
ast::Expression::Boolean(lit) => Ok(Expression::Boolean(convert_boolean_literal(lit, fresher)?)),
ast::Expression::Float(lit) => Ok(Expression::Float(convert_float_literal(lit, fresher)?)),
ast::Expression::Integer(lit) => Ok(Expression::Integer(convert_integer_literal(lit, fresher)?)),
ast::Expression::Uint(lit) => Ok(Expression::Uint(convert_unsigned_integer_literal(lit, fresher)?)),
ast::Expression::Regexp(lit) => Ok(Expression::Regexp(convert_regexp_literal(lit, fresher)?)),
ast::Expression::Duration(lit) => Ok(Expression::Duration(convert_duration_literal(lit, fresher)?)),
ast::Expression::DateTime(lit) => Ok(Expression::DateTime(convert_date_time_literal(lit, fresher)?)),
ast::Expression::PipeLit(_) => Err("a pipe literal may only be used as a default value for an argument in a function definition".to_string()),
ast::Expression::Bad(_) => Err("BadExpression is not supported in semantic analysis".to_string())
}
}
fn convert_function_expression(
expr: ast::FunctionExpr,
fresher: &mut Fresher,
) -> Result<FunctionExpr> {
let params = convert_function_params(expr.params, fresher)?;
let body = convert_function_body(expr.body, fresher)?;
Ok(FunctionExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
params,
body,
})
}
fn convert_function_params(
props: Vec<ast::Property>,
fresher: &mut Fresher,
) -> Result<Vec<FunctionParameter>> {
// The iteration here is complex, cannot use iter().map()..., better to write it explicitly.
let mut params: Vec<FunctionParameter> = Vec::new();
let mut piped = false;
for prop in props {
let id = match prop.key {
ast::PropertyKey::Identifier(id) => Ok(id),
_ => Err("function params must be identifiers".to_string()),
}?;
let key = convert_identifier(id, fresher)?;
let mut default: Option<Expression> = None;
let mut is_pipe = false;
if let Some(expr) = prop.value {
match expr {
ast::Expression::PipeLit(_) => {
if piped {
return Err("only a single argument may be piped".to_string());
} else {
piped = true;
is_pipe = true;
};
}
e => default = Some(convert_expression(e, fresher)?),
}
};
params.push(FunctionParameter {
loc: prop.base.location,
is_pipe,
key,
default,
});
}
Ok(params)
}
fn convert_function_body(body: ast::FunctionBody, fresher: &mut Fresher) -> Result<Block> {
match body {
ast::FunctionBody::Expr(expr) => {
let argument = convert_expression(expr, fresher)?;
Ok(Block::Return(ReturnStmt {
loc: argument.loc().clone(),
argument,
}))
}
ast::FunctionBody::Block(block) => Ok(convert_block(block, fresher)?),
}
}
fn convert_block(block: ast::Block, fresher: &mut Fresher) -> Result<Block> {
let mut body = block.body.into_iter().rev();
let block = if let Some(ast::Statement::Return(stmt)) = body.next() {
let argument = convert_expression(stmt.argument, fresher)?;
Block::Return(ReturnStmt {
loc: stmt.base.location.clone(),
argument,
})
} else {
return Err("missing return statement in block".to_string());
};
body.try_fold(block, |acc, s| match s {
ast::Statement::Variable(dec) => Ok(Block::Variable(
Box::new(convert_variable_assignment(*dec, fresher)?),
Box::new(acc),
)),
ast::Statement::Expr(stmt) => Ok(Block::Expr(
convert_expression_statement(*stmt, fresher)?,
Box::new(acc),
)),
_ => Err(format!("invalid statement in function block {:#?}", s)),
})
}
fn convert_call_expression(expr: ast::CallExpr, fresher: &mut Fresher) -> Result<CallExpr> {
let callee = convert_expression(expr.callee, fresher)?;
// TODO(affo): I'd prefer these checks to be in ast.Check().
if expr.arguments.len() > 1 {
return Err("arguments are more than one object expression".to_string());
}
let mut args = expr
.arguments
.into_iter()
.map(|a| match a {
ast::Expression::Object(obj) => convert_object_expression(*obj, fresher),
_ => Err("arguments not an object expression".to_string()),
})
.collect::<Result<Vec<ObjectExpr>>>()?;
let arguments = match args.len() {
0 => Ok(Vec::new()),
1 => Ok(args.pop().expect("there must be 1 element").properties),
_ => Err("arguments are more than one object expression".to_string()),
}?;
Ok(CallExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
callee,
arguments,
pipe: None,
})
}
fn convert_member_expression(expr: ast::MemberExpr, fresher: &mut Fresher) -> Result<MemberExpr> {
let object = convert_expression(expr.object, fresher)?;
let property = match expr.property {
ast::PropertyKey::Identifier(id) => id.name,
ast::PropertyKey::StringLit(lit) => lit.value,
};
Ok(MemberExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
object,
property,
})
}
fn convert_index_expression(expr: ast::IndexExpr, fresher: &mut Fresher) -> Result<IndexExpr> {
let array = convert_expression(expr.array, fresher)?;
let index = convert_expression(expr.index, fresher)?;
Ok(IndexExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
array,
index,
})
}
fn convert_pipe_expression(expr: ast::PipeExpr, fresher: &mut Fresher) -> Result<CallExpr> {
let mut call = convert_call_expression(expr.call, fresher)?;
let pipe = convert_expression(expr.argument, fresher)?;
call.pipe = Some(pipe);
Ok(call)
}
fn convert_binary_expression(expr: ast::BinaryExpr, fresher: &mut Fresher) -> Result<BinaryExpr> {
let left = convert_expression(expr.left, fresher)?;
let right = convert_expression(expr.right, fresher)?;
Ok(BinaryExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
operator: expr.operator,
left,
right,
})
}
fn convert_unary_expression(expr: ast::UnaryExpr, fresher: &mut Fresher) -> Result<UnaryExpr> {
let argument = convert_expression(expr.argument, fresher)?;
Ok(UnaryExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
operator: expr.operator,
argument,
})
}
fn convert_logical_expression(
expr: ast::LogicalExpr,
fresher: &mut Fresher,
) -> Result<LogicalExpr> {
let left = convert_expression(expr.left, fresher)?;
let right = convert_expression(expr.right, fresher)?;
Ok(LogicalExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
operator: expr.operator,
left,
right,
})
}
fn convert_conditional_expression(
expr: ast::ConditionalExpr,
fresher: &mut Fresher,
) -> Result<ConditionalExpr> {
let test = convert_expression(expr.test, fresher)?;
let consequent = convert_expression(expr.consequent, fresher)?;
let alternate = convert_expression(expr.alternate, fresher)?;
Ok(ConditionalExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
test,
consequent,
alternate,
})
}
fn convert_object_expression(expr: ast::ObjectExpr, fresher: &mut Fresher) -> Result<ObjectExpr> {
let properties = expr
.properties
.into_iter()
.map(|p| convert_property(p, fresher))
.collect::<Result<Vec<Property>>>()?;
let with = match expr.with {
Some(with) => Some(convert_identifier_expression(with.source, fresher)?),
None => None,
};
Ok(ObjectExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
with,
properties,
})
}
fn convert_property(prop: ast::Property, fresher: &mut Fresher) -> Result<Property> {
let key = match prop.key {
ast::PropertyKey::Identifier(id) => convert_identifier(id, fresher)?,
ast::PropertyKey::StringLit(lit) => Identifier {
loc: lit.base.location.clone(),
name: convert_string_literal(lit, fresher)?.value,
},
};
let value = match prop.value {
Some(expr) => convert_expression(expr, fresher)?,
None => Expression::Identifier(IdentifierExpr {
loc: key.loc.clone(),
typ: MonoType::Var(fresher.fresh()),
name: key.name.clone(),
}),
};
Ok(Property {
loc: prop.base.location,
key,
value,
})
}
fn convert_array_expression(expr: ast::ArrayExpr, fresher: &mut Fresher) -> Result<ArrayExpr> {
let elements = expr
.elements
.into_iter()
.map(|e| convert_expression(e.expression, fresher))
.collect::<Result<Vec<Expression>>>()?;
Ok(ArrayExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
elements,
})
}
fn convert_identifier(id: ast::Identifier, _fresher: &mut Fresher) -> Result<Identifier> {
Ok(Identifier {
loc: id.base.location,
name: id.name,
})
}
fn convert_identifier_expression(
id: ast::Identifier,
fresher: &mut Fresher,
) -> Result<IdentifierExpr> {
Ok(IdentifierExpr {
loc: id.base.location,
typ: MonoType::Var(fresher.fresh()),
name: id.name,
})
}
fn convert_string_expression(expr: ast::StringExpr, fresher: &mut Fresher) -> Result<StringExpr> {
let parts = expr
.parts
.into_iter()
.map(|p| convert_string_expression_part(p, fresher))
.collect::<Result<Vec<StringExprPart>>>()?;
Ok(StringExpr {
loc: expr.base.location,
typ: MonoType::Var(fresher.fresh()),
parts,
})
}
fn convert_string_expression_part(
expr: ast::StringExprPart,
fresher: &mut Fresher,
) -> Result<StringExprPart> {
match expr {
ast::StringExprPart::Text(txt) => Ok(StringExprPart::Text(TextPart {
loc: txt.base.location,
value: txt.value,
})),
ast::StringExprPart::Interpolated(itp) => {
Ok(StringExprPart::Interpolated(InterpolatedPart {
loc: itp.base.location,
expression: convert_expression(itp.expression, fresher)?,
}))
}
}
}
fn convert_string_literal(lit: ast::StringLit, fresher: &mut Fresher) -> Result<StringLit> {
Ok(StringLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
fn convert_boolean_literal(lit: ast::BooleanLit, fresher: &mut Fresher) -> Result<BooleanLit> {
Ok(BooleanLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
fn convert_float_literal(lit: ast::FloatLit, fresher: &mut Fresher) -> Result<FloatLit> {
Ok(FloatLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
fn convert_integer_literal(lit: ast::IntegerLit, fresher: &mut Fresher) -> Result<IntegerLit> {
Ok(IntegerLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
fn convert_unsigned_integer_literal(lit: ast::UintLit, fresher: &mut Fresher) -> Result<UintLit> {
Ok(UintLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
fn convert_regexp_literal(lit: ast::RegexpLit, fresher: &mut Fresher) -> Result<RegexpLit> {
Ok(RegexpLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
fn convert_duration_literal(lit: ast::DurationLit, fresher: &mut Fresher) -> Result<DurationLit> {
Ok(DurationLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: convert_duration(&lit.values)?,
})
}
fn convert_date_time_literal(lit: ast::DateTimeLit, fresher: &mut Fresher) -> Result<DateTimeLit> {
Ok(DateTimeLit {
loc: lit.base.location,
typ: MonoType::Var(fresher.fresh()),
value: lit.value,
})
}
// In these tests we test the results of semantic analysis on some ASTs.
// NOTE: we do not care about locations.
// We create a default base node and clone it in various AST nodes.
#[cfg(test)]
mod tests {
use super::*;
use crate::semantic::fresh;
use crate::semantic::types::{MonoType, Tvar};
use pretty_assertions::assert_eq;
// type_info() is used for the expected semantic graph.
// The id for the Tvar does not matter, because that is not compared.
fn type_info() -> MonoType {
MonoType::Var(Tvar(0))
}
fn test_convert(pkg: ast::Package) -> Result<Package> {
convert_with(pkg, &mut fresh::Fresher::default())
}
#[test]
fn test_convert_empty() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: Vec::new(),
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: Vec::new(),
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_package() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: Some(ast::PackageClause {
base: b.clone(),
name: ast::Identifier {
base: b.clone(),
name: "foo".to_string(),
},
}),
imports: Vec::new(),
body: Vec::new(),
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: Some(PackageClause {
loc: b.location.clone(),
name: Identifier {
loc: b.location.clone(),
name: "foo".to_string(),
},
}),
imports: Vec::new(),
body: Vec::new(),
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_imports() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: Some(ast::PackageClause {
base: b.clone(),
name: ast::Identifier {
base: b.clone(),
name: "foo".to_string(),
},
}),
imports: vec![
ast::ImportDeclaration {
base: b.clone(),
path: ast::StringLit {
base: b.clone(),
value: "path/foo".to_string(),
},
alias: None,
},
ast::ImportDeclaration {
base: b.clone(),
path: ast::StringLit {
base: b.clone(),
value: "path/bar".to_string(),
},
alias: Some(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
},
],
body: Vec::new(),
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: Some(PackageClause {
loc: b.location.clone(),
name: Identifier {
loc: b.location.clone(),
name: "foo".to_string(),
},
}),
imports: vec![
ImportDeclaration {
loc: b.location.clone(),
path: StringLit {
loc: b.location.clone(),
typ: type_info(),
value: "path/foo".to_string(),
},
alias: None,
},
ImportDeclaration {
loc: b.location.clone(),
path: StringLit {
loc: b.location.clone(),
typ: type_info(),
value: "path/bar".to_string(),
},
alias: Some(Identifier {
loc: b.location.clone(),
name: "b".to_string(),
}),
},
],
body: Vec::new(),
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_var_assignment() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![
ast::Statement::Variable(Box::new(ast::VariableAssgn {
base: b.clone(),
id: ast::Identifier {
base: b.clone(),
name: "a".to_string(),
},
init: ast::Expression::Boolean(ast::BooleanLit {
base: b.clone(),
value: true,
}),
})),
ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
})),
],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![
Statement::Variable(Box::new(VariableAssgn::new(
Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
Expression::Boolean(BooleanLit {
loc: b.location.clone(),
typ: type_info(),
value: true,
}),
b.location.clone(),
))),
Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
}),
],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_object() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 10,
})),
comma: None,
}],
rbrace: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Object(Box::new(ObjectExpr {
loc: b.location.clone(),
typ: type_info(),
with: None,
properties: vec![Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 10,
}),
}],
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_object_with_string_key() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![ast::Property {
base: b.clone(),
key: ast::PropertyKey::StringLit(ast::StringLit {
base: b.clone(),
value: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 10,
})),
comma: None,
}],
rbrace: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Object(Box::new(ObjectExpr {
loc: b.location.clone(),
typ: type_info(),
with: None,
properties: vec![Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 10,
}),
}],
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_object_with_mixed_keys() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::StringLit(ast::StringLit {
base: b.clone(),
value: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 10,
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 11,
})),
comma: None,
},
],
rbrace: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Object(Box::new(ObjectExpr {
loc: b.location.clone(),
typ: type_info(),
with: None,
properties: vec![
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 10,
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 11,
}),
},
],
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_object_with_implicit_keys() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: None,
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
separator: None,
value: None,
comma: None,
},
],
rbrace: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Object(Box::new(ObjectExpr {
loc: b.location.clone(),
typ: type_info(),
with: None,
properties: vec![
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
value: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
value: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "b".to_string(),
}),
},
],
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_options_declaration() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Option(Box::new(ast::OptionStmt {
base: b.clone(),
assignment: ast::Assignment::Variable(Box::new(ast::VariableAssgn {
base: b.clone(),
id: ast::Identifier {
base: b.clone(),
name: "task".to_string(),
},
init: ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "name".to_string(),
}),
separator: None,
value: Some(ast::Expression::StringLit(ast::StringLit {
base: b.clone(),
value: "foo".to_string(),
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "every".to_string(),
}),
separator: None,
value: Some(ast::Expression::Duration(ast::DurationLit {
base: b.clone(),
values: vec![ast::Duration {
magnitude: 1,
unit: "h".to_string(),
}],
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "delay".to_string(),
}),
separator: None,
value: Some(ast::Expression::Duration(ast::DurationLit {
base: b.clone(),
values: vec![ast::Duration {
magnitude: 10,
unit: "m".to_string(),
}],
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "cron".to_string(),
}),
separator: None,
value: Some(ast::Expression::StringLit(ast::StringLit {
base: b.clone(),
value: "0 2 * * *".to_string(),
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "retry".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 5,
})),
comma: None,
},
],
rbrace: None,
})),
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Option(Box::new(OptionStmt {
loc: b.location.clone(),
assignment: Assignment::Variable(VariableAssgn::new(
Identifier {
loc: b.location.clone(),
name: "task".to_string(),
},
Expression::Object(Box::new(ObjectExpr {
loc: b.location.clone(),
typ: type_info(),
with: None,
properties: vec![
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "name".to_string(),
},
value: Expression::StringLit(StringLit {
loc: b.location.clone(),
typ: type_info(),
value: "foo".to_string(),
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "every".to_string(),
},
value: Expression::Duration(DurationLit {
loc: b.location.clone(),
typ: type_info(),
value: Duration {
months: 5,
nanoseconds: 5000,
negative: false,
},
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "delay".to_string(),
},
value: Expression::Duration(DurationLit {
loc: b.location.clone(),
typ: type_info(),
value: Duration {
months: 1,
nanoseconds: 50,
negative: true,
},
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "cron".to_string(),
},
value: Expression::StringLit(StringLit {
loc: b.location.clone(),
typ: type_info(),
value: "0 2 * * *".to_string(),
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "retry".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 5,
}),
},
],
})),
b.location.clone(),
)),
}))],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_qualified_option_statement() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Option(Box::new(ast::OptionStmt {
base: b.clone(),
assignment: ast::Assignment::Member(Box::new(ast::MemberAssgn {
base: b.clone(),
member: ast::MemberExpr {
base: b.clone(),
object: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "alert".to_string(),
}),
lbrack: None,
property: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "state".to_string(),
}),
rbrack: None,
},
init: ast::Expression::StringLit(ast::StringLit {
base: b.clone(),
value: "Warning".to_string(),
}),
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Option(Box::new(OptionStmt {
loc: b.location.clone(),
assignment: Assignment::Member(MemberAssgn {
loc: b.location.clone(),
member: MemberExpr {
loc: b.location.clone(),
typ: type_info(),
object: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "alert".to_string(),
}),
property: "state".to_string(),
},
init: Expression::StringLit(StringLit {
loc: b.location.clone(),
typ: type_info(),
value: "Warning".to_string(),
}),
}),
}))],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_function() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![
ast::Statement::Variable(Box::new(ast::VariableAssgn {
base: b.clone(),
id: ast::Identifier {
base: b.clone(),
name: "f".to_string(),
},
init: ast::Expression::Function(Box::new(ast::FunctionExpr {
base: b.clone(),
lparen: None,
params: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: None,
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
separator: None,
value: None,
comma: None,
},
],
rparen: None,
arrow: None,
body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new(
ast::BinaryExpr {
base: b.clone(),
operator: ast::Operator::AdditionOperator,
left: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
right: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
},
))),
})),
})),
ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Call(Box::new(ast::CallExpr {
base: b.clone(),
callee: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "f".to_string(),
}),
lparen: None,
arguments: vec![ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 2,
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 3,
})),
comma: None,
},
],
rbrace: None,
}))],
rparen: None,
})),
})),
],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![
Statement::Variable(Box::new(VariableAssgn::new(
Identifier {
loc: b.location.clone(),
name: "f".to_string(),
},
Expression::Function(Box::new(FunctionExpr {
loc: b.location.clone(),
typ: type_info(),
params: vec![
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
default: None,
},
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
default: None,
},
],
body: Block::Return(ReturnStmt {
loc: b.location.clone(),
argument: Expression::Binary(Box::new(BinaryExpr {
loc: b.location.clone(),
typ: type_info(),
operator: ast::Operator::AdditionOperator,
left: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
right: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "b".to_string(),
}),
})),
}),
})),
b.location.clone(),
))),
Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Call(Box::new(CallExpr {
loc: b.location.clone(),
typ: type_info(),
pipe: None,
callee: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "f".to_string(),
}),
arguments: vec![
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 2,
}),
},
Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 3,
}),
},
],
})),
}),
],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_function_with_defaults() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![
ast::Statement::Variable(Box::new(ast::VariableAssgn {
base: b.clone(),
id: ast::Identifier {
base: b.clone(),
name: "f".to_string(),
},
init: ast::Expression::Function(Box::new(ast::FunctionExpr {
base: b.clone(),
lparen: None,
params: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 0,
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 0,
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "c".to_string(),
}),
separator: None,
value: None,
comma: None,
},
],
rparen: None,
arrow: None,
body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new(
ast::BinaryExpr {
base: b.clone(),
operator: ast::Operator::AdditionOperator,
left: ast::Expression::Binary(Box::new(ast::BinaryExpr {
base: b.clone(),
operator: ast::Operator::AdditionOperator,
left: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
right: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
})),
right: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "c".to_string(),
}),
},
))),
})),
})),
ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Call(Box::new(ast::CallExpr {
base: b.clone(),
callee: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "f".to_string(),
}),
lparen: None,
arguments: vec![ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "c".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 42,
})),
comma: None,
}],
rbrace: None,
}))],
rparen: None,
})),
})),
],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![
Statement::Variable(Box::new(VariableAssgn::new(
Identifier {
loc: b.location.clone(),
name: "f".to_string(),
},
Expression::Function(Box::new(FunctionExpr {
loc: b.location.clone(),
typ: type_info(),
params: vec![
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
default: Some(Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 0,
})),
},
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
default: Some(Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 0,
})),
},
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "c".to_string(),
},
default: None,
},
],
body: Block::Return(ReturnStmt {
loc: b.location.clone(),
argument: Expression::Binary(Box::new(BinaryExpr {
loc: b.location.clone(),
typ: type_info(),
operator: ast::Operator::AdditionOperator,
left: Expression::Binary(Box::new(BinaryExpr {
loc: b.location.clone(),
typ: type_info(),
operator: ast::Operator::AdditionOperator,
left: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
right: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "b".to_string(),
}),
})),
right: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "c".to_string(),
}),
})),
}),
})),
b.location.clone(),
))),
Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Call(Box::new(CallExpr {
loc: b.location.clone(),
typ: type_info(),
pipe: None,
callee: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "f".to_string(),
}),
arguments: vec![Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "c".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 42,
}),
}],
})),
}),
],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_function_multiple_pipes() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Variable(Box::new(ast::VariableAssgn {
base: b.clone(),
id: ast::Identifier {
base: b.clone(),
name: "f".to_string(),
},
init: ast::Expression::Function(Box::new(ast::FunctionExpr {
base: b.clone(),
lparen: None,
params: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: None,
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "piped1".to_string(),
}),
separator: None,
value: Some(ast::Expression::PipeLit(ast::PipeLit {
base: b.clone(),
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "piped2".to_string(),
}),
separator: None,
value: Some(ast::Expression::PipeLit(ast::PipeLit {
base: b.clone(),
})),
comma: None,
},
],
rparen: None,
arrow: None,
body: ast::FunctionBody::Expr(ast::Expression::Identifier(
ast::Identifier {
base: b.clone(),
name: "a".to_string(),
},
)),
})),
}))],
eof: None,
}],
};
let got = test_convert(pkg).err().unwrap().to_string();
assert_eq!("only a single argument may be piped".to_string(), got);
}
#[test]
fn test_convert_call_multiple_object_arguments() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Call(Box::new(ast::CallExpr {
base: b.clone(),
callee: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "f".to_string(),
}),
lparen: None,
arguments: vec![
ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 0,
})),
comma: None,
}],
rbrace: None,
})),
ast::Expression::Object(Box::new(ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 1,
})),
comma: None,
}],
rbrace: None,
})),
],
rparen: None,
})),
}))],
eof: None,
}],
};
let got = test_convert(pkg).err().unwrap().to_string();
assert_eq!(
"arguments are more than one object expression".to_string(),
got
);
}
#[test]
fn test_convert_pipe_expression() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![
ast::Statement::Variable(Box::new(ast::VariableAssgn {
base: b.clone(),
id: ast::Identifier {
base: b.clone(),
name: "f".to_string(),
},
init: ast::Expression::Function(Box::new(ast::FunctionExpr {
base: b.clone(),
lparen: None,
params: vec![
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "piped".to_string(),
}),
separator: None,
value: Some(ast::Expression::PipeLit(ast::PipeLit {
base: b.clone(),
})),
comma: None,
},
ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: None,
comma: None,
},
],
rparen: None,
arrow: None,
body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new(
ast::BinaryExpr {
base: b.clone(),
operator: ast::Operator::AdditionOperator,
left: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
right: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "piped".to_string(),
}),
},
))),
})),
})),
ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::PipeExpr(Box::new(ast::PipeExpr {
base: b.clone(),
argument: ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 3,
}),
call: ast::CallExpr {
base: b.clone(),
callee: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "f".to_string(),
}),
lparen: None,
arguments: vec![ast::Expression::Object(Box::new(
ast::ObjectExpr {
base: b.clone(),
lbrace: None,
with: None,
properties: vec![ast::Property {
base: b.clone(),
key: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
separator: None,
value: Some(ast::Expression::Integer(
ast::IntegerLit {
base: b.clone(),
value: 2,
},
)),
comma: None,
}],
rbrace: None,
},
))],
rparen: None,
},
})),
})),
],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![
Statement::Variable(Box::new(VariableAssgn::new(
Identifier {
loc: b.location.clone(),
name: "f".to_string(),
},
Expression::Function(Box::new(FunctionExpr {
loc: b.location.clone(),
typ: type_info(),
params: vec![
FunctionParameter {
loc: b.location.clone(),
is_pipe: true,
key: Identifier {
loc: b.location.clone(),
name: "piped".to_string(),
},
default: None,
},
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
default: None,
},
],
body: Block::Return(ReturnStmt {
loc: b.location.clone(),
argument: Expression::Binary(Box::new(BinaryExpr {
loc: b.location.clone(),
typ: type_info(),
operator: ast::Operator::AdditionOperator,
left: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
right: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "piped".to_string(),
}),
})),
}),
})),
b.location.clone(),
))),
Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Call(Box::new(CallExpr {
loc: b.location.clone(),
typ: type_info(),
pipe: Some(Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 3,
})),
callee: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "f".to_string(),
}),
arguments: vec![Property {
loc: b.location.clone(),
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
value: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 2,
}),
}],
})),
}),
],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_function_expression_simple() {
let b = ast::BaseNode::default();
let f = FunctionExpr {
loc: b.location.clone(),
typ: type_info(),
params: vec![
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
default: None,
},
FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
default: None,
},
],
body: Block::Return(ReturnStmt {
loc: b.location.clone(),
argument: Expression::Binary(Box::new(BinaryExpr {
loc: b.location.clone(),
typ: type_info(),
operator: ast::Operator::AdditionOperator,
left: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
right: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "b".to_string(),
}),
})),
}),
};
assert_eq!(Vec::<&FunctionParameter>::new(), f.defaults());
assert_eq!(None, f.pipe());
}
#[test]
fn test_function_expression_defaults_and_pipes() {
let b = ast::BaseNode::default();
let piped = FunctionParameter {
loc: b.location.clone(),
is_pipe: true,
key: Identifier {
loc: b.location.clone(),
name: "a".to_string(),
},
default: Some(Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 0,
})),
};
let default1 = FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "b".to_string(),
},
default: Some(Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 1,
})),
};
let default2 = FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "c".to_string(),
},
default: Some(Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 2,
})),
};
let no_default = FunctionParameter {
loc: b.location.clone(),
is_pipe: false,
key: Identifier {
loc: b.location.clone(),
name: "d".to_string(),
},
default: None,
};
let defaults = vec![&piped, &default1, &default2];
let f = FunctionExpr {
loc: b.location.clone(),
typ: type_info(),
params: vec![
piped.clone(),
default1.clone(),
default2.clone(),
no_default.clone(),
],
body: Block::Return(ReturnStmt {
loc: b.location.clone(),
argument: Expression::Binary(Box::new(BinaryExpr {
loc: b.location.clone(),
typ: type_info(),
operator: ast::Operator::AdditionOperator,
left: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
right: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "b".to_string(),
}),
})),
}),
};
assert_eq!(defaults, f.defaults());
assert_eq!(Some(&piped), f.pipe());
}
#[test]
fn test_convert_index_expression() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Index(Box::new(ast::IndexExpr {
base: b.clone(),
array: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
lbrack: None,
index: ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 3,
}),
rbrack: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Index(Box::new(IndexExpr {
loc: b.location.clone(),
typ: type_info(),
array: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
index: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 3,
}),
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_nested_index_expression() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Index(Box::new(ast::IndexExpr {
base: b.clone(),
array: ast::Expression::Index(Box::new(ast::IndexExpr {
base: b.clone(),
array: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
lbrack: None,
index: ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 3,
}),
rbrack: None,
})),
lbrack: None,
index: ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 5,
}),
rbrack: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Index(Box::new(IndexExpr {
loc: b.location.clone(),
typ: type_info(),
array: Expression::Index(Box::new(IndexExpr {
loc: b.location.clone(),
typ: type_info(),
array: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
index: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 3,
}),
})),
index: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 5,
}),
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_access_idexed_object_returned_from_function_call() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Index(Box::new(ast::IndexExpr {
base: b.clone(),
array: ast::Expression::Call(Box::new(ast::CallExpr {
base: b.clone(),
callee: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "f".to_string(),
}),
lparen: None,
arguments: vec![],
rparen: None,
})),
lbrack: None,
index: ast::Expression::Integer(ast::IntegerLit {
base: b.clone(),
value: 3,
}),
rbrack: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Index(Box::new(IndexExpr {
loc: b.location.clone(),
typ: type_info(),
array: Expression::Call(Box::new(CallExpr {
loc: b.location.clone(),
typ: type_info(),
pipe: None,
callee: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "f".to_string(),
}),
arguments: Vec::new(),
})),
index: Expression::Integer(IntegerLit {
loc: b.location.clone(),
typ: type_info(),
value: 3,
}),
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_nested_member_expression() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Member(Box::new(ast::MemberExpr {
base: b.clone(),
object: ast::Expression::Member(Box::new(ast::MemberExpr {
base: b.clone(),
object: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
lbrack: None,
property: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
rbrack: None,
})),
lbrack: None,
property: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "c".to_string(),
}),
rbrack: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Member(Box::new(MemberExpr {
loc: b.location.clone(),
typ: type_info(),
object: Expression::Member(Box::new(MemberExpr {
loc: b.location.clone(),
typ: type_info(),
object: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
property: "b".to_string(),
})),
property: "c".to_string(),
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_member_with_call_expression() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Member(Box::new(ast::MemberExpr {
base: b.clone(),
object: ast::Expression::Call(Box::new(ast::CallExpr {
base: b.clone(),
callee: ast::Expression::Member(Box::new(ast::MemberExpr {
base: b.clone(),
object: ast::Expression::Identifier(ast::Identifier {
base: b.clone(),
name: "a".to_string(),
}),
lbrack: None,
property: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "b".to_string(),
}),
rbrack: None,
})),
lparen: None,
arguments: vec![],
rparen: None,
})),
lbrack: None,
property: ast::PropertyKey::Identifier(ast::Identifier {
base: b.clone(),
name: "c".to_string(),
}),
rbrack: None,
})),
}))],
eof: None,
}],
};
let want = Package {
loc: b.location.clone(),
package: "main".to_string(),
files: vec![File {
loc: b.location.clone(),
package: None,
imports: Vec::new(),
body: vec![Statement::Expr(ExprStmt {
loc: b.location.clone(),
expression: Expression::Member(Box::new(MemberExpr {
loc: b.location.clone(),
typ: type_info(),
object: Expression::Call(Box::new(CallExpr {
loc: b.location.clone(),
typ: type_info(),
pipe: None,
callee: Expression::Member(Box::new(MemberExpr {
loc: b.location.clone(),
typ: type_info(),
object: Expression::Identifier(IdentifierExpr {
loc: b.location.clone(),
typ: type_info(),
name: "a".to_string(),
}),
property: "b".to_string(),
})),
arguments: Vec::new(),
})),
property: "c".to_string(),
})),
})],
}],
};
let got = test_convert(pkg).unwrap();
assert_eq!(want, got);
}
#[test]
fn test_convert_bad_stmt() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Bad(Box::new(ast::BadStmt {
base: b.clone(),
text: "bad statement".to_string(),
}))],
eof: None,
}],
};
let want: Result<Package> =
Err("BadStatement is not supported in semantic analysis".to_string());
let got = test_convert(pkg);
assert_eq!(want, got);
}
#[test]
fn test_convert_bad_expr() {
let b = ast::BaseNode::default();
let pkg = ast::Package {
base: b.clone(),
path: "path".to_string(),
package: "main".to_string(),
files: vec![ast::File {
base: b.clone(),
name: "foo.flux".to_string(),
metadata: String::new(),
package: None,
imports: Vec::new(),
body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {
base: b.clone(),
expression: ast::Expression::Bad(Box::new(ast::BadExpr {
base: b.clone(),
text: "bad expression".to_string(),
expression: None,
})),
}))],
eof: None,
}],
};
let want: Result<Package> =
Err("BadExpression is not supported in semantic analysis".to_string());
let got = test_convert(pkg);
assert_eq!(want, got);
}
}
| true |
12e6e4ad88859f08e5b01b898b0ec9d33a62cb58
|
Rust
|
Vechro/roman
|
/src/lib.rs
|
UTF-8
| 3,082 | 3.578125 | 4 |
[] |
no_license
|
mod test;
/// Map a single Roman numeral character to its decimal value.
///
/// Only the uppercase numerals I/V/X/L/C/D/M are recognized; anything
/// else yields `None` (callers uppercase their input first).
const fn roman_lut(numeral: &char) -> Option<usize> {
    let value = match numeral {
        'I' => 1,
        'V' => 5,
        'X' => 10,
        'L' => 50,
        'C' => 100,
        'D' => 500,
        'M' => 1000,
        _ => return None,
    };
    Some(value)
}
/// Map a Roman "denomination" (a power of ten or a subtractive pair)
/// to its numeral string. Returns `None` for values that are not one of
/// the thirteen denominations listed in `DIGITS_DESC`.
const fn arabic_lut(digit: &usize) -> Option<&str> {
    match digit {
        1 => Some("I"),
        4 => Some("IV"),
        5 => Some("V"),
        9 => Some("IX"),
        10 => Some("X"),
        40 => Some("XL"),
        50 => Some("L"),
        90 => Some("XC"),
        100 => Some("C"),
        400 => Some("CD"),
        500 => Some("D"),
        // Bug fix: 900 is written "CM" (1000 - 100); the original
        // returned "DM", which is not a valid Roman numeral.
        900 => Some("CM"),
        1000 => Some("M"),
        _ => None,
    }
}
// Denominations used by `arabic_to_roman`, largest first, including the
// subtractive pairs (900, 400, 90, 40, 9, 4) so each one maps to exactly
// one entry in `arabic_lut`.
static DIGITS_DESC: [usize; 13] = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1];

// Accumulator for `roman_to_arabic`: the running total plus the largest
// numeral value seen so far while scanning right-to-left, which decides
// whether the next numeral is added or subtracted.
struct Tally {
    total: usize,
    max: usize,
}
// Impure function as it prints to stdout immediately.
/// For every entry: if it starts with a letter, treat it as a Roman numeral
/// and convert to Arabic; if it starts with a digit, convert the other way.
/// Entries that fail conversion print "Invalid numerals!"; an empty entry
/// hits `unreachable!` (unchanged from the original contract).
pub fn convert_and_print_numerals(list_of_numerals: &[String]) {
    for entry in list_of_numerals {
        let converted = match entry.chars().next() {
            Some(c) if c.is_ascii_alphabetic() => {
                roman_to_arabic(&entry.to_ascii_uppercase())
            }
            Some(c) if c.is_ascii_digit() => arabic_to_roman(entry),
            Some(_) => None,
            None => unreachable!(),
        };
        match converted {
            Some(text) => println!("{}", text),
            None => println!("Invalid numerals!"),
        }
    }
}
/// Convert a decimal string to a Roman numeral.
///
/// Greedily consumes denominations from `DIGITS_DESC` (largest first),
/// appending each numeral as many times as it divides the remainder.
/// Returns `None` when the input is not a valid `usize`.
fn arabic_to_roman(arabic_numerals: &str) -> Option<String> {
    let mut remaining = arabic_numerals.parse::<usize>().ok()?;
    let mut out = String::new();
    for digit in DIGITS_DESC.iter() {
        let times = remaining / *digit;
        remaining %= *digit;
        // Every entry of DIGITS_DESC has a lookup-table numeral.
        match arabic_lut(digit) {
            Some(numeral) => out.push_str(&numeral.repeat(times)),
            None => unreachable!(),
        }
    }
    Some(out)
}
fn roman_to_arabic(roman_numerals: &str) -> Option<String> {
let result = roman_numerals.chars().rfold(
Some(Tally { total: 0, max: 0 }),
|tally: Option<Tally>, c| {
let current_value = match roman_lut(&c) {
Some(val) => val,
None => return None,
};
let (total, mut max) = match tally {
Some(Tally { total, max }) => (total, max),
None => return None,
};
max = current_value.max(max);
if current_value >= max {
Some(Tally {
total: total + current_value,
max,
})
} else {
Some(Tally {
total: total - current_value,
max,
})
}
},
);
match result {
Some(Tally { total, .. }) => Some(total.to_string()),
None => None,
}
}
| true |
eeafdd246e46748861c9efec9a4ce1d0f6e81ccd
|
Rust
|
bouzuya/rust-atcoder
|
/cargo-atcoder/contests/abc296/src/bin/b.rs
|
UTF-8
| 312 | 2.640625 | 3 |
[] |
no_license
|
use proconio::{input, marker::Chars};
fn main() {
    // 8x8 board of characters read from stdin via proconio.
    input! {
        s: [Chars; 8],
    };
    // Chess-style coordinates: files 'a'..'h' left to right,
    // ranks 8..1 top to bottom. Print the square of every '*'.
    for (row, cells) in s.iter().enumerate() {
        let rank = 8 - row;
        for (col, &cell) in cells.iter().enumerate() {
            if cell == '*' {
                let file = (b'a' + col as u8) as char;
                println!("{}{}", file, rank);
            }
        }
    }
}
| true |
81318dce492702fd85da4204de7e836ba3beca28
|
Rust
|
kakoc/leetcode
|
/src/except_self.rs
|
UTF-8
| 779 | 3.21875 | 3 |
[] |
no_license
|
/// LeetCode 238: for each index i, return the product of all elements of
/// `nums` except `nums[i]`, without using division.
///
/// Two passes: first fill `result[i]` with the product of everything to the
/// right of i (suffix pass), then multiply in the product of everything to
/// the left (prefix pass). O(n) time, O(1) extra space beyond the output.
pub fn product_except_self(nums: Vec<i32>) -> Vec<i32> {
    let n = nums.len();
    let mut result = vec![1; n];

    // Suffix products: result[i] = nums[i+1] * ... * nums[n-1].
    let mut acc = 1;
    for i in (0..n).rev() {
        result[i] = acc;
        acc *= nums[i];
    }

    // Prefix products folded in: result[i] *= nums[0] * ... * nums[i-1].
    acc = 1;
    for i in 0..n {
        result[i] *= acc;
        acc *= nums[i];
    }

    result
}
#[test]
fn test_except_self() {
    // Mixed positives, and a case containing a zero.
    let got = product_except_self(vec![1, 2, 3, 4]);
    assert_eq!(got, vec![24, 12, 8, 6]);

    let got = product_except_self(vec![9, 0, -2]);
    assert_eq!(got, vec![0, -18, 0]);
}
| true |
507353e2300b9ffbfd7a5507ffdbdb51ce737fde
|
Rust
|
ebsnet/blockchain
|
/lib/data/src/tx.rs
|
UTF-8
| 2,121 | 3.140625 | 3 |
[] |
no_license
|
//! This module contains transaction specific data.
use hack::BigArray;
use bincode::serialize;
use failure::Error;
/// Size of a Ed25519 signature in bytes.
pub const SIG_SIZE: usize = 64;

/// Convenience type for a signature.
// 64-byte array: larger than serde's derive handles natively, hence the
// `BigArray` helper on the `SignedData.signature` field below.
pub type Signature = [u8; SIG_SIZE];

/// Convenience type for signed data inside a block.
pub type BlockData = SignedData<Data>;

/// Wrapper for signed data. This struct contains the data and the signature.
#[derive(Deserialize, Serialize, Clone)]
pub struct SignedData<T> {
    // Custom (de)serialization for the oversized fixed array.
    #[serde(with = "BigArray")]
    signature: Signature,
    data: T,
}
impl<T> SignedData<T> {
    /// Generate a new object from supplied data and a signature.
    ///
    /// The signature is stored as-is; no verification happens here.
    pub fn new(signature: Signature, data: T) -> Self {
        // Field-init shorthand (was the redundant `signature: signature`).
        Self { signature, data }
    }

    /// Returns a reference to the wrapped data.
    pub fn data(&self) -> &T {
        &self.data
    }

    /// Returns a reference to the wrapped signature.
    pub fn signature(&self) -> &Signature {
        &self.signature
    }
}
/// Convenience type for a fingerprint.
// NOTE(review): presumably an identity/key hash — confirm with callers.
pub type Fingerprint = Vec<u8>;

/// The data that can be contained in a block.
#[derive(Deserialize, Serialize, PartialEq, Clone)]
pub enum Data {
    /// Billing operation used to initialize a billing process and indicate that a user has been
    /// billed at a certain point in time.
    Billing(Fingerprint),
    /// Usage operation that protocols the power usage of a user.
    Usage(u64),
}

/// Types that implement this trait can be signed.
pub trait Signable {
    /// Converts the data to a list of bytes that can be signed.
    fn get_bytes(&self) -> Result<Vec<u8>, Error>;
}
impl Signable for Data {
    /// The signable byte representation is the bincode encoding of the
    /// enum; encoding errors are converted into the crate's `Error`.
    fn get_bytes(&self) -> Result<Vec<u8>, Error> {
        Ok(serialize(self)?)
    }
}
// Default: an all-zero (invalid) signature around the payload's default.
impl<T> Default for SignedData<T>
where
    T: Default,
{
    fn default() -> Self {
        Self {
            signature: [0; SIG_SIZE],
            data: Default::default(),
        }
    }
}

// Default block payload: a billing entry with an empty fingerprint.
impl Default for Data {
    fn default() -> Self {
        Data::Billing(Default::default())
    }
}
| true |
c8a539fe0e2a23380ab156c99430c4099d68e83c
|
Rust
|
arashout/cover-letter-generator
|
/src/blurb.rs
|
UTF-8
| 1,322 | 2.96875 | 3 |
[] |
no_license
|
use crate::rules::{Rule, apply_rules};
use crate::types::TokenizedDescription;
/// A reusable cover-letter paragraph plus the rules that decide whether
/// it applies to a given tokenized job description.
#[derive(Default)]
pub struct Blurb<'a> {
    // NOTE(review): field name is a misspelling of "precedence"; renaming
    // would break external users of this public field.
    pub precendence: u8,
    pub long_description: &'a str,
    pub short_description: &'a str,
    pub name: &'a str,
    // Private: only extended through `add_rule`.
    rules: Vec<Box<Rule>>,
}
impl<'a> Blurb<'a> {
    /// Create a blurb with the given name, default precedence 10, empty
    /// descriptions and no rules.
    pub fn new(name: &'a str) -> Self {
        Blurb {
            // Field-init shorthand (was the redundant `name: name`).
            name,
            precendence: 10,
            // `""` is already a `&str`; the original's `&""` relied on an
            // unnecessary double reference plus deref coercion.
            long_description: "",
            short_description: "",
            rules: vec![],
        }
    }

    /// Builder: override the precedence.
    pub fn with_precedence(mut self, n: u8) -> Self {
        self.precendence = n;
        self
    }

    /// Builder: set both the short and the long description.
    pub fn with_description(mut self, description: &'a str) -> Self {
        self.short_description = description;
        self.long_description = description;
        self
    }

    /// Builder: override only the long description.
    pub fn with_long_description(mut self, description: &'a str) -> Self {
        self.long_description = description;
        self
    }

    /// Builder: append a matching rule.
    pub fn add_rule(mut self, boxed_rule: Box<Rule>) -> Self {
        self.rules.push(boxed_rule);
        self
    }

    /// A blurb never applies when it has no rules; otherwise the decision
    /// is delegated to `apply_rules` over all of its rules.
    pub fn is_applicable(&self, tokenized_description: &TokenizedDescription) -> bool {
        if self.rules.is_empty() {
            return false;
        }
        apply_rules(tokenized_description, &self.rules)
    }
}

/// Convenience alias for a list of blurbs.
pub type BlurbVec<'a> = Vec<Blurb<'a>>;
| true |
4ce7f85b9bc8f8e8ddf7a5c2bb82447b83da7e83
|
Rust
|
richarddowner/Rust
|
/rust-by-example/modules/modules.rs
|
UTF-8
| 975 | 3.734375 | 4 |
[] |
no_license
|
// Rust provides a powerful module system that can be used to hierarchically
// split code in logical units (modules), and manage visibility (public/priv)
// between them.

// A module is a collection of items like: functions, structs, traits, impl blocks,
// and even other modules.

// A free function at crate root, callable from `main` below.
fn function() {
    println!("called `function()`");
}

// A module named `my`
mod my {
    // A module can contain items like functions.
    // Items are private by default, so this is only visible inside `my`;
    // the attribute silences the resulting dead-code warning.
    #[allow(dead_code)]
    fn function() {
        println!("called `my::function()`");
    }

    // Modules can be nested
    mod nested {
        #[allow(dead_code)]
        fn function() {
            println!("called `my::nested::function()`");
        }
    }
}
fn main() {
function();
// items inside a module can be called using their full path
// the `println` function lives in the stdio module
// the `stdio` module lives in the `io` module
// and the `io` module lives in the `std` crate
std::io::stdio::println("Hello World!");
// Error! `my::function` is private
// my::function();
}
| true |
b8f9029ec2b77aa4922952530b4ef8e23da04d25
|
Rust
|
davideGiovannini/rust_sdl2_engine
|
/leek/src/lib.rs
|
UTF-8
| 3,592 | 2.78125 | 3 |
[] |
no_license
|
//! The following code creates an empty window:
//! ```
//! fn main() {
//! Engine::new("Title").start::<Game>();
//! }
//!
//!
//! struct Game;
//!
//! impl GameScene for Game {
//! fn set_up(&mut self) {}
//!
//! fn logic(&mut self, context: &EngineContext, engine: &mut Engine, ui: &Ui) -> EngineAction {
//! EngineAction::default()
//! }
//!
//! fn render(&mut self, context: &EngineContext, engine: &mut Engine, ui: &Ui) {}
//! }
//!
//! impl FromEngine for Game {
//! fn init(engine: &mut Engine) -> Self {
//! Game{}
//! }
//! }
//!
//! ```
//! Update Game struct with your desired field :)
//!
extern crate gl;
pub extern crate sdl2;
pub extern crate alto;
extern crate failure;
pub extern crate lewton;
extern crate notify;
#[macro_use]
pub extern crate imgui;
use sdl2::pixels::Color;
pub mod alto_utils;
mod engine;
mod fps_counter;
mod game_controllers;
mod imgui_backend;
mod opengl;
mod post_processing;
pub mod prelude;
mod sdl2_utils;
mod debug;
pub mod resources;
#[macro_use]
mod common_macros;
pub use post_processing::PostProcessEffect as PostProcessingEffect;
pub mod math;
pub use engine::game::{AnyGameScene, FromEngine, GameScene};
pub use game_controllers::{GameController, GameControllerManager};
pub use engine::action::EngineAction;
pub use engine::context::EngineContext;
pub use engine::Engine;
pub use sdl2_utils::log_system_info;
pub mod font;
// Default window size (width, height) in pixels.
const WINDOW_SIZE: (u32, u32) = (800, 600);
// Default clear color: opaque black.
const CLEAR_COLOR: Color = Color {
    r: 0,
    g: 0,
    b: 0,
    a: 255,
};

/// Accumulates window and rendering options before `start` launches the
/// engine. Produced by `Engine::new`; the `with_*` methods mutate in place
/// and return `&mut Self` so calls can be chained.
pub struct EngineBuilder<'window> {
    window_title: &'window str,
    window_size: (u32, u32),
    // Optional logical render size, distinct from the physical window size.
    logical_size: Option<(u32, u32)>,
    fullscreen: bool,
    hide_cursor: bool,
    relative_cursor: bool,
    clear_color: Color,
    imgui_font_scale: f32,
}
impl Engine {
    /// Begin configuring an engine run for a window with the given title.
    /// All other options start at their defaults; finish the chain with
    /// `EngineBuilder::start`.
    pub fn new(window_title: &str) -> EngineBuilder {
        EngineBuilder {
            window_title,
            fullscreen: false,
            hide_cursor: false,
            relative_cursor: false,
            window_size: WINDOW_SIZE,
            logical_size: None,
            clear_color: CLEAR_COLOR,
            imgui_font_scale: 1.5,
        }
    }
}
impl<'window> EngineBuilder<'window> {
    /// Set the initial window size in pixels.
    pub fn with_window_size(&mut self, width: u32, height: u32) -> &mut Self {
        self.window_size = (width, height);
        self
    }

    /// Set a logical render size distinct from the physical window size.
    pub fn with_logical_size(&mut self, width: u32, height: u32) -> &mut Self {
        self.logical_size = Some((width, height));
        self
    }

    /// Set the color the screen is cleared to each frame.
    pub fn with_clear_color(&mut self, color: Color) -> &mut Self {
        self.clear_color = color;
        self
    }

    /// Toggle fullscreen mode.
    pub fn with_fullscreen(&mut self, enabled: bool) -> &mut Self {
        self.fullscreen = enabled;
        self
    }

    /// Scale factor for the imgui font.
    pub fn with_imgui_font_scale(&mut self, scale: f32) -> &mut Self {
        self.imgui_font_scale = scale;
        self
    }

    /// Hide the mouse cursor while it is over the window.
    pub fn with_hidden_cursor(&mut self, hidden: bool) -> &mut Self {
        self.hide_cursor = hidden;
        self
    }

    /// Put the cursor into relative (captured) mode.
    pub fn with_relative_cursor(&mut self, relative: bool) -> &mut Self {
        self.relative_cursor = relative;
        self
    }

    /// Start the engine with `Scene` as the initial game scene; any error
    /// returned by the engine loop is printed and swallowed.
    pub fn start<Scene: 'static>(&mut self)
    where
        Scene: GameScene + FromEngine,
    {
        if let Err(error) = engine::run_engine::<Scene>(self) {
            println!("{:?}", error)
        }
    }
}
// RE-EXPORTS
// Keyboard types re-exported so downstream crates do not need a direct
// dependency on `sdl2` for scancode handling.
pub mod keyboard {
    pub use sdl2::keyboard::Scancode;
}
| true |
8f22063d9137e61e624b12972c7fa844563d77e2
|
Rust
|
zargony/advent-of-code-2016
|
/src/day20.rs
|
UTF-8
| 1,553 | 3.359375 | 3 |
[] |
no_license
|
use std::num;
/// Parse multiline text of `from-to` ranges into a vector of tuples.
///
/// Returns `Err` when a bound is not a valid `u32` (the original
/// `unwrap`ped inside the closure, so the declared `ParseIntError` could
/// never actually be returned — it panicked instead). Still panics if a
/// line lacks a `-` separator, unchanged from the original contract.
pub fn parse(s: &str) -> Result<Vec<(u32, u32)>, num::ParseIntError> {
    s.lines()
        .map(|line| {
            let mut bounds = line.split('-');
            let from = bounds.next().unwrap().parse::<u32>()?;
            let to = bounds.next().unwrap().parse::<u32>()?;
            Ok((from, to))
        })
        .collect()
}
/// Find the lowest number not covered by a list of inclusive ranges.
///
/// Sorts a copy of the ranges by start and sweeps left to right, pushing
/// the candidate past every range that reaches it. Once a range starts
/// above the candidate, no later range (sorted by start) can cover it,
/// so the candidate is final. Returns 0 for an empty list.
pub fn find_lowest(ranges: &[(u32, u32)]) -> u32 {
    let mut sorted = ranges.to_vec();
    sorted.sort_unstable();
    sorted.iter().fold(0u32, |lowest, &(from, to)| {
        if from > lowest {
            // Gap reached: every later range also starts above `lowest`.
            lowest
        } else if to >= lowest {
            to + 1
        } else {
            lowest
        }
    })
}
/// Find the amount of numbers not covered by a list of inclusive ranges,
/// between 0 and the end of the highest range.
///
/// Fix: the original initialized its "highest covered" marker to 0, which
/// silently treated the value 0 as covered even when no range included it
/// (e.g. `[(3, 5)]` reported 2 uncovered instead of 3). We instead track
/// `next`, the first value not yet known to be covered, starting at 0, and
/// do the gap arithmetic in u64 so `to + 1` cannot overflow at u32::MAX.
/// As before, values above the highest range are not counted.
pub fn find_uncovered(ranges: &[(u32, u32)]) -> u32 {
    let mut sorted = ranges.to_vec();
    sorted.sort_by_key(|r| r.0);
    let mut next: u64 = 0;
    let mut count: u64 = 0;
    for (from, to) in sorted {
        let (from, to) = (u64::from(from), u64::from(to));
        if from > next {
            // Values next..from-1 (inclusive) are uncovered.
            count += from - next;
        }
        if to + 1 > next {
            next = to + 1;
        }
    }
    count as u32
}
fn main() {
    // Puzzle input is embedded at compile time; a malformed file is fatal.
    let ranges = parse(include_str!("day20.txt")).unwrap();
    println!("Lowest non-blocked IP: {}", find_lowest(&ranges));
    println!("Number of allowed IPs: {}", find_uncovered(&ranges));
}
#[cfg(test)]
mod tests {
    use super::*;

    // Sample from the puzzle statement: ranges 5-8, 0-2, 4-7 leave 3 free.
    #[test]
    fn finding_lowest() {
        let ranges = parse("5-8\n0-2\n4-7").unwrap();
        assert_eq!(find_lowest(&ranges), 3);
    }

    // Same sample: only the value 3 is uncovered between the ranges.
    #[test]
    fn finding_uncovered() {
        let ranges = parse("5-8\n0-2\n4-7").unwrap();
        assert_eq!(find_uncovered(&ranges), 1);
    }
}
| true |
ee0dd34daf298b6848c5a9ed8498979928af2d71
|
Rust
|
m1el/refterm-hash-break
|
/main.rs
|
UTF-8
| 12,123 | 2.609375 | 3 |
[] |
no_license
|
#![allow(clippy::needless_return)]
#![feature(portable_simd)]
use core_simd::Simd;
use core::convert::TryInto;
use srng::SRng;
use simd_aes::SimdAes;
// Fixed 128-bit seed XORed into the initial hash state (refterm's default).
const DEFAULT_SEED: Simd<u8, 16> = Simd::from_array([
    178, 201, 95, 240, 40, 41, 143, 216,
    2, 209, 178, 114, 232, 4, 176, 188,
]);

// Reimplementation of refterm's glyph hash: the state starts as the message
// length XOR the seed; each 16-byte chunk is XORed in and mixed with four
// AES decryption rounds against an all-zero round key, and the zero-padded
// final partial chunk is processed the same way.
#[allow(non_snake_case)]
fn ComputeGlyphHash(data: &[u8]) -> Simd<u8, 16> {
    let zero = Simd::splat(0);
    let mut hash = Simd::<u64, 2>::from_array([data.len() as u64, 0]).to_ne_bytes();
    hash ^= DEFAULT_SEED;

    let mut chunks = data.chunks_exact(16);
    for chunk in chunks.by_ref() {
        let chunk: &[u8; 16] = chunk.try_into().unwrap();
        let value = Simd::from_array(*chunk);
        hash ^= value;
        hash = hash.aes_dec(zero);
        hash = hash.aes_dec(zero);
        hash = hash.aes_dec(zero);
        hash = hash.aes_dec(zero);
    }

    // Zero-pad the trailing partial chunk (possibly empty) and mix it in.
    let remainder = chunks.remainder();
    let mut temp = [0_u8; 16];
    temp[..remainder.len()].copy_from_slice(remainder);
    let value = Simd::from_array(temp);

    hash ^= value;
    hash = hash.aes_dec(zero);
    hash = hash.aes_dec(zero);
    hash = hash.aes_dec(zero);
    hash = hash.aes_dec(zero);

    return hash;
}
// Inverse of one keyed aes_dec round: every operation in the hash consumes
// its input reversibly, which is what makes all the attacks below possible.
#[allow(dead_code)]
fn inv_aes_dec(mut data: Simd<u8, 16>, key: Simd<u8, 16>) -> Simd<u8, 16> {
    data ^= key;
    let zero = Simd::splat(0);
    data = data.aes_dec_last(zero).aes_enc(zero);
    return data.aes_enc_last(zero);
}

// Inverse of the hash's four consecutive aes_dec rounds with a zero key.
fn inv_aes_decx4(mut hash: Simd<u8, 16>) -> Simd<u8, 16> {
    let zero = Simd::splat(0);
    hash = hash.aes_dec_last(zero);
    hash = hash.aes_enc(zero);
    hash = hash.aes_enc(zero);
    hash = hash.aes_enc(zero);
    hash = hash.aes_enc(zero);
    hash = hash.aes_enc_last(zero);
    return hash;
}

// Compute a single 16-byte prefix block that maps the hash's initial state
// onto `target_hash` for a message of `count + 16` total bytes.
fn single_prefix(count: usize, target_hash: Simd<u8, 16>) -> Simd<u8, 16> {
    // The first stage looks like this:
    // Hash ^ Seed = dec^4(Count ^ Seed ^ Chunk)
    // To get the chunk, we need to reverse these:
    // dec^-4(Hash ^ Seed) = Count ^ Seed ^ Chunk
    // Chunk = dec^4(Hash ^ Seed) ^ Count ^ Seed
    // To create a one-prefix initialization, we want:
    // Hash = Count
    // Count = Count + 16
    let mut hash = target_hash;
    hash = inv_aes_decx4(hash);
    let prefix_init = Simd::<u64, 2>::from_array([count as u64 + 16, 0]).to_ne_bytes();
    hash ^= prefix_init;
    hash ^= DEFAULT_SEED;
    return hash;
}

// Run the hash backwards over `data` (tail block first, then whole chunks
// in reverse), yielding the state that precedes `data` in the real hash.
fn preimage_prefix_hash(mut hash: Simd<u8, 16>, data: &[u8]) -> Simd<u8, 16> {
    let chunks = data.len() / 16;
    let tail = &data[chunks*16..];
    let mut tail_buf = [0_u8; 16];
    tail_buf[..tail.len()].copy_from_slice(tail);
    let value = Simd::from_array(tail_buf);
    hash = inv_aes_decx4(hash);
    hash ^= value;

    for chunk in data.chunks_exact(16).rev() {
        let chunk: &[u8; 16] = chunk.try_into().unwrap();
        let value = Simd::from_array(*chunk);
        hash = inv_aes_decx4(hash);
        hash ^= value;
    }

    return hash;
}

// Undo exactly one full 16-byte block of the hash.
fn invert_block(mut hash: Simd<u8, 16>, chunk: &[u8]) -> Simd<u8, 16> {
    let chunk: &[u8; 16] = chunk.try_into().unwrap();
    let value = Simd::from_array(*chunk);
    hash = inv_aes_decx4(hash);
    return hash ^ value;
}

// Undo the final (zero-padded) block plus one more round group.
fn invert_last(suffix: &[u8], mut hash: Simd<u8, 16>) -> Simd<u8, 16> {
    let mut tail_buf = [0_u8; 16];
    tail_buf[..suffix.len()].copy_from_slice(suffix);
    let value = Simd::from_array(tail_buf);

    hash = inv_aes_decx4(hash);
    hash ^= value;
    hash = inv_aes_decx4(hash);
    return hash;
}

// prefix ++ target as a fresh byte vector.
fn concat(prefix: Simd<u8, 16>, target: &[u8]) -> Vec<u8> {
    let mut image = prefix.to_array().to_vec();
    image.extend_from_slice(target);
    image
}
// Demo: craft a 16-byte prefix so `prefix ++ message` hashes to the same
// value as `message` alone (a collision between the two inputs).
fn prefix_collision_attack(message: &[u8]) {
    let mut target_hash = Simd::<u64, 2>::from_array([message.len() as u64, 0]).to_ne_bytes();
    target_hash ^= DEFAULT_SEED;

    let prefix = single_prefix(message.len(), target_hash);
    println!("Demonstrating prefix attack");
    println!("message: {:x?}", message);
    println!("hash: {:x?}", ComputeGlyphHash(b"hello"));
    println!("prefix: {:x?}", prefix);
    let forgery = concat(prefix, message);
    println!("forgery: {:x?}", forgery);
    println!("hash: {:x?}", ComputeGlyphHash(&forgery));
    println!();
}

// Demo: keep an arbitrary chosen prefix and append one computed block so
// the whole message hashes to zero.
fn chosen_prefix(prefix: &[u8]) {
    let zero = Simd::splat(0);
    let mut message = prefix.to_vec();
    // Pad to a block boundary with 'A's, then reserve one controllable block.
    let remainder = 16 - (message.len() % 16);
    message.extend((0..remainder).map(|_| b'A'));
    message.extend((0..16).map(|_| 0));
    let hash = ComputeGlyphHash(&message);
    let pre_current = invert_last(&[], hash);
    let pre_target = invert_last(&[], zero);
    let last = message.len() - 16;
    // XOR difference of the two pre-states is exactly the block to inject.
    let suffix = pre_current ^ pre_target;
    message[last..].copy_from_slice(&suffix.to_array());
    println!("Demonstrating chosen prefix attack");
    println!("prefix: {:x?}", prefix);
    println!("forgery: {:x?}", message);
    println!("hash: {:x?}", ComputeGlyphHash(&message));
    println!();
}

// Demo: preimage — build a message ending in `suffix` whose hash is zero.
fn preimage_attack(suffix: &[u8]) {
    println!("Demonstrating preimage attack");
    println!("suffix: {:x?}", suffix);
    let target_hash = Simd::splat(0);
    println!("goal hash: {:x?}", target_hash);
    let prefix_hash = preimage_prefix_hash(target_hash, suffix);
    let preimage_prefix = single_prefix(suffix.len(), prefix_hash);
    println!("prefix: {:x?}", preimage_prefix);
    let message = concat(preimage_prefix, suffix);
    println!("message: {:x?}", message);
    println!("hash: {:x?}", ComputeGlyphHash(&message));
}

// Demo: constant zero padding means e.g. "A" and "B\x00" collide (the
// length byte XOR compensates for the changed first character).
fn padding_attack() {
    println!("Demonstrating padding attack");
    println!(r#"message: "", hash: {:x?}"#, ComputeGlyphHash(b""));
    println!(r#"message: "\x01", hash: {:x?}"#, ComputeGlyphHash(b"\x01"));
    println!(r#"message: "A", hash: {:x?}"#, ComputeGlyphHash(b"A"));
    println!(r#"message: "B\x00", hash: {:x?}"#, ComputeGlyphHash(b"B\x00"));
    println!(r#"message: "BAAAAAAAAAAAAAAA", hash: {:x?}"#, ComputeGlyphHash(b"BAAAAAAAAAAAAAAA"));
    println!(r#"message: "CAAAAAAAAAAAAAAA\x00", hash: {:x?}"#, ComputeGlyphHash(b"CAAAAAAAAAAAAAAA\x00"));
    println!();
}

// Demo: recover a sub-16-byte plaintext from its hash alone by running the
// single padded block backwards and undoing the length XOR.
fn invert_attack(message: &[u8]) {
    println!("Demonstrating invert attack, invert a hash up to 15 bytes");
    println!("Note: due to padding attack, there are actually more messages");
    println!("plaintext: {:x?}", message);
    let mut hash = ComputeGlyphHash(message);
    println!("hash: {:x?}", hash);
    hash = inv_aes_decx4(hash);
    hash ^= DEFAULT_SEED;
    let mut buffer = hash.to_array();
    // Length = position after the last non-zero padding byte.
    let len = buffer.iter().rposition(|&chr| chr != 0).map_or(0, |x| x + 1);
    if len == 16 {
        println!("the plaintext mus be shorter than 16 bytes, cannot invert");
        return;
    }
    // The first state byte was XORed with the message length.
    buffer[0] ^= len as u8;
    let recovered = &buffer[..len];
    println!("recovered: {:x?}", recovered);
    println!("hash: {:x?}", ComputeGlyphHash(recovered));
    println!();
}
// True iff all 16 lanes are ASCII alphanumeric ([0-9A-Za-z]). Uses the
// wrapping-subtract trick: `x - lo <= hi - lo` (unsigned) tests lo..=hi.
pub fn check_alphanum(bytes: Simd<u8, 16>) -> bool {
    // check if the characters are outside of '0'..'z' range
    if (bytes - Simd::splat(b'0')).lanes_gt(Simd::splat(b'z' - b'0')).any() {
        return false;
    }
    // check if the characters are in of '9'+1..'A'-1 range
    if (bytes - Simd::splat(b'9' + 1)).lanes_lt(Simd::splat(b'A' - (b'9' + 1))).any() {
        return false;
    }
    // check if the characters are in of 'Z'+1..'a'-1 range
    if (bytes - Simd::splat(b'Z' + 1)).lanes_lt(Simd::splat(b'a' - (b'Z' + 1))).any() {
        return false;
    }
    return true;
}
use core::sync::atomic::{AtomicBool, Ordering};
// Cross-thread "stop searching" flag shared by all worker threads.
static FOUND: AtomicBool = AtomicBool::new(false);

// Brute-force an all-alphanumeric 22-byte preimage (16-byte computed prefix
// + 6 random bytes) so that `prefix ++ random ++ suffix` hashes to zero.
// Each worker uses a seed derived from its id; the first hit wins.
fn find_ascii_zeros(suffix: &[u8], worker: u64) {
    const ATTACK_BYTES: usize = 6;
    let mut target_hash = Simd::<u8, 16>::splat(0);
    let mut bsuffix = suffix;
    let suffix_len = 16 - ATTACK_BYTES;
    let mut whole_block = false;
    // Long suffixes: pre-invert everything past the first block once.
    if suffix.len() >= suffix_len {
        target_hash = preimage_prefix_hash(target_hash, &suffix[suffix_len..]);
        bsuffix = &suffix[..suffix_len];
        whole_block = true;
    }
    let mut controlled = [0u8; 16];
    let total_len = ATTACK_BYTES + suffix.len();
    let controlled_bytes = total_len.min(16);
    let controlled = &mut controlled[..controlled_bytes];
    controlled[ATTACK_BYTES..].copy_from_slice(bsuffix);

    let seed = Simd::from_array([
        17820195240, 4041143216,
        22093178114, 2324176188,
    ]);
    let mut rng = SRng::new(seed * Simd::splat(worker + 1));
    let start = std::time::Instant::now();

    for ii in 0_u64.. {
        if FOUND.load(Ordering::Relaxed) {
            return;
        }

        // Randomize the 6 attacker-controlled bytes, then solve for the
        // 16-byte prefix block that would make the full hash zero.
        let prefix = rng.random_alphanum();
        controlled[..6].copy_from_slice(&prefix[..6]);

        let prefix = {
            let prefix_hash = if whole_block {
                invert_block(target_hash, controlled)
            } else {
                preimage_prefix_hash(target_hash, controlled)
            };
            single_prefix(total_len, prefix_hash)
        };

        // Accept only prefixes that happen to be printable alphanumerics.
        if check_alphanum(prefix) {
            FOUND.store(true, Ordering::Relaxed);
            let mut buffer = prefix.to_array().to_vec();
            buffer.extend_from_slice(&controlled[..6]);
            buffer.extend_from_slice(suffix);
            let elapsed = start.elapsed();
            let mhs = (ii as f64) / 1e6 / elapsed.as_secs_f64();
            eprintln!("found prefix in {}it {:?} {:3.3}MH/s/core", ii, elapsed, mhs);
            eprintln!("hash: {:x?}", ComputeGlyphHash(&buffer));
            println!("{}", core::str::from_utf8(&buffer).unwrap());
            break;
        }
    }
}
// Each line below gets its own 22-character alphanumeric preimage prepended
// by `find_ascii_zeros`, so every printed line hashes to all-zero.
const MESSAGE: &[&[u8]] = &[
    b"  Hello Casey!  I hope this message finds you well.",
    b"  Please ignore those 22 random chars to the left for now.",
    b"  The work you've done on refterm is admirable.  There are",
    b"  not enough performance conscious programmers around, and",
    b"  we need a demonstration of what is achievable.  However,",
    b"  I would like to address the claim that the hash function",
    b"  used in refterm is 'cryptographically secure'.  There is",
    b"  a very specific meaning attached to those words, namely:",
    b"  1) it is hard to create a message for a given hash value",
    b"  2) it is hard to produce two messages with the same hash",
    b"  If you check, the following strings have the same hash:",
    b"  xvD7FsaUdGy9UyjalZlFEU, 0XXPpB0wpVszsvSxgsn0su,",
    b"  IGNwdjol0dxLflcnfW7vsI, jcTHx0zBJbW2tdiX157RSz.",
    b"  In fact, every line in the message yields the exact same",
    b"  hash value.  That is 0x00000000000000000000000000000000.",
    b"  I believe this was a clear enough demonstration that the",
    b"  hash function `ComputeGlyphHash` isn't cryptographically",
    b"  secure, and that an attacker can corrupt the glyph cache",
    b"  by printing glyphs with the same hash.  The main problem",
    b"  with this hash function is that all operations consuming",
    b"  bytes are invertible.  Which means an attacker could run",
    b"  the hash function in reverse, consuming the message from",
    b"  behind, and calculate the message to get the given hash.",
    b"  The hash is also weak to a padding attack.  For example,",
    br#"  two strings "A" and "B\x00" yield the same hash, because"#,
    b"  the padding is constant, so zero byte in the end doens't",
    b"  matter, and the first byte is `xor`ed with input length.",
    b"  If you'd like to, you can read this blog post explaining",
    b"  these attacks in detail and how to avoid them using well",
    b"  known methods: https://m1el.github.io/refterm-hash",
    b"  Best regards, -- Igor",
];

fn main() {
    // Run the cheap demonstrations first, then brute-force a zero-hash
    // preimage for every message line across 16 worker threads.
    padding_attack();
    invert_attack(b"Qwerty123");
    prefix_collision_attack(b"hello");
    chosen_prefix(b"hello");
    preimage_attack(b"hello");
    const THREADS: u64 = 16;
    for msg in MESSAGE {
        FOUND.store(false, Ordering::Relaxed);
        let threads = (0..THREADS)
            .map(|worker| std::thread::spawn(move || find_ascii_zeros(msg, worker)))
            .collect::<Vec<_>>();
        for thread in threads {
            thread.join().unwrap();
        }
    };
}
| true |
cced4a2da2d263cc5d424bf18d58bfc715bb4de4
|
Rust
|
neosmart/securestore-rs
|
/securestore/src/tests/secrets.rs
|
UTF-8
| 4,788 | 3.59375 | 4 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Highest-level tests for the secure store
use crate::errors::ErrorKind;
use crate::{KeySource, SecretsManager};
use tempfile::NamedTempFile;
/// Verify that basic storage and retrieval of secrets functions correctly.
#[test]
fn basic_store_get() {
    // Create a new secrets manager with a known secret so we don't need to muck
    // around with keyfiles later.
    let secrets_path = NamedTempFile::new().unwrap().into_temp_path();
    let mut sman = SecretsManager::new(KeySource::Password("mysecret")).unwrap();
    // Make sure that we can set values in different &str/String types
    sman.set("foo", "bar");
    sman.set("foo", "bar".to_string());
    sman.save_as(&secrets_path).unwrap();

    // Do we get the same value back on get?
    let getd: String = sman.get("foo").unwrap();
    assert_eq!("bar", getd);

    // Now open the store from the disk with the same settings and make sure the
    // data remains loadable.
    let sman2 = SecretsManager::load(&secrets_path, KeySource::Password("mysecret")).unwrap();
    let getd: String = sman2.get("foo").unwrap();
    assert_eq!("bar", getd);
}

// Loading a saved store with the wrong password must fail with a
// DecryptionFailure (detected via the store's sentinel), not succeed or panic.
#[test]
fn wrong_password() {
    let secrets_path = NamedTempFile::new().unwrap().into_temp_path();
    let mut sman = SecretsManager::new(KeySource::Password("mysecret")).unwrap();
    // Set something
    sman.set("foo", "foo");
    // And save the store to disk
    sman.save_as(&secrets_path).unwrap();

    // Now try loading the store with wrong password
    match SecretsManager::load(&secrets_path, KeySource::Password("notmysecret")) {
        Ok(_) => panic!("Sentinel failed to detect wrong password on load"),
        Err(e) => {
            assert_eq!(ErrorKind::DecryptionFailure, e.kind());
        }
    }
}

// Fetching a key that was never stored yields SecretNotFound.
#[test]
fn secret_not_found() {
    let sman = SecretsManager::new(KeySource::Csprng).unwrap();
    assert_eq!(Err(ErrorKind::SecretNotFound.into()), sman.get("foo"));
}
// A CSPRNG-generated key exported to a file must be able to reopen the store.
#[test]
fn csprng_export() {
    let secrets_path = NamedTempFile::new().unwrap().into_temp_path();
    let key_path = NamedTempFile::new().unwrap().into_temp_path();

    {
        let mut sman = SecretsManager::new(KeySource::Csprng).unwrap();
        sman.export_key(&key_path).unwrap();
        sman.set("foo", "bar");
        sman.save_as(&secrets_path).unwrap();
    }

    let sman = SecretsManager::load(secrets_path, KeySource::File(key_path)).unwrap();
    assert_eq!(Ok("bar".to_owned()), sman.get("foo"));
}

// Same round-trip for a password-derived key exported through the legacy
// `.export_keyfile()` alias.
#[test]
fn password_export() {
    let secrets_path = NamedTempFile::new().unwrap().into_temp_path();
    let key_path = NamedTempFile::new().unwrap().into_temp_path();

    {
        let mut sman = SecretsManager::new(KeySource::Password("password123")).unwrap();
        // Use legacy .export() alias .export_keyfile() to make sure it works
        sman.export_keyfile(&key_path).unwrap();
        sman.set("foo", "bar");
        sman.save_as(&secrets_path).unwrap();
    }

    let sman = SecretsManager::load(secrets_path, KeySource::File(key_path)).unwrap();
    assert_eq!(Ok("bar".to_owned()), sman.get("foo"));
}

// An empty/garbage key file is rejected with InvalidKeyfile.
#[test]
fn invalid_key_file() {
    let key_path = NamedTempFile::new().unwrap().into_temp_path();

    match SecretsManager::new(KeySource::File(key_path)) {
        Ok(_) => panic!("SecretsManager loaded with invalid key file!"),
        Err(e) => assert_eq!(ErrorKind::InvalidKeyfile, e.kind()),
    }
}
// Binary (non-UTF-8 capable) secrets round-trip through set/get_as.
#[test]
fn binary_secret() {
    let mut sman = SecretsManager::new(KeySource::Csprng).unwrap();
    let (key, value) = ("secret", b"Hello, world!");
    sman.set(key, &value[..]);
    assert_eq!(&value[..], sman.get_as::<Vec<u8>>(key).unwrap().as_slice());
}

#[test]
/// A release added generics to KeySource which were later removed because the
/// default generic fallback doesn't work on current rust versions. This had
/// let `KeySource::File(path: AsRef<Path>)` work, but broke `KeySource::Csprng`
/// and `KeySource::Password` because the `P: AsRef<Path>` wasn't defined for
/// those variants (unless it was explicitly provided, though not used).
///
/// `KeySource::File` was renamed to `KeySource::Path` and takes a `&Path` only,
/// but a function masquerading as a variant called `KeySource::File()` was
/// introduced that returns `impl GenericKeySource`, the trait which we now
/// accept in the `new()` and `load()` functions. This function is hidden from
/// the docs and is for backwards-compatibility only.
fn legacy_generic_keysource() {
    // We just want to verify that this compiles, we don't test the result here.
    let _ = SecretsManager::load("secrets.json", KeySource::File("secrets.key"));
}

// Plain &str paths are accepted directly as a generic key source.
#[test]
fn str_as_generic_keysource() {
    // We just want to verify that this compiles, we don't test the result here.
    let _ = SecretsManager::load("secrets.json", "secrets.key");
}
| true |
68d081dfc8abf1dcb438287b74bb54f66deef276
|
Rust
|
jneem/rust-playground
|
/src/skyline.rs
|
UTF-8
| 9,228 | 3.28125 | 3 |
[] |
no_license
|
// NOTE(review): this file uses pre-1.0 Rust (`#[deriving]`, `box`, `uint`,
// `Float::infinity()`); it will not build on any modern toolchain.

// A "building" is one linear piece (y = m*x + b) of a skyline, valid for
// x up to `end`; the next building in the vector takes over after that.
#[deriving(Clone, Show)]
struct Building {
    m: f64,
    b: f64,
    end: f64
}

// To prevent numerical instability, we don't allow large slopes.
static MAX_SLOPE: f64 = 1e3;
impl Building {
    // Line through (x1,y1)-(x2,y2) with the slope clamped to ±MAX_SLOPE;
    // `b` is raised so the clamped line still passes at-or-above both points.
    fn from_points(x1: f64, y1: f64, x2: f64, y2: f64) -> Building {
        // To avoid NaNs, we deal with vertical line segments separately.
        if x1 == x2 {
            return Building {
                m: 0.0,
                b: y1.max(y2),
                end: x1
            }
        }
        let m_orig = (y2 - y1) / (x2 - x1);
        let m = m_orig.max(-MAX_SLOPE).min(MAX_SLOPE);
        let b = (y1 - m*x1).max(y2 - m*x2);
        Building { m: m, b: b, end: x1.max(x2) }
    }

    // x coordinate where the two (extended) lines cross; parallel lines
    // (NaN) are mapped to negative infinity.
    fn intersection(&self, other: &Building) -> f64 {
        let x = (other.b - self.b) / (self.m - other.m);
        if x.is_nan() { Float::neg_infinity() } else { x }
    }

    // True if `self` is at-or-above `other` for all x' >= x.
    fn conceals(&self, other: &Building, x: f64) -> bool {
        self.conceals_with_intersect(other, x, self.intersection(other))
    }

    // Same as `conceals`, with the intersection point precomputed.
    fn conceals_with_intersect(&self,
                               other: &Building,
                               x: f64,
                               intersect: f64) -> bool {
        if self.m == other.m {
            self.b >= other.b
        } else {
            (intersect <= x && self.m > other.m)
                || (intersect > x && self.m < other.m)
        }
    }

    // A building at negative infinity: concealed by everything.
    fn empty(end: f64) -> Building {
        Building {
            m: 0.0,
            b: Float::neg_infinity(),
            end: end
        }
    }

    // Copy of `self` truncated (or extended) to a new end coordinate.
    fn chop(&self, new_end: f64) -> Building {
        Building {
            m: self.m,
            b: self.b,
            end: new_end
        }
    }

    // Height of the (extended) line at x.
    fn y(&self, x: f64) -> f64 {
        // We assume that the slope is not infinite. Then
        // the only way to get NaN out of m*x + b is if
        // b is infinite. But if b is infinite
        // then it should be negative infinity, and we just return it.
        if self.b.is_infinite() { self.b } else { self.m * x + self.b }
    }
}
// FIXME: the parameter of type Option<Self> is a work-around
// for not having UFCS. See
// https://mail.mozilla.org/pipermail/rust-dev/2014-May/009850.html
// Marker trait turning a compass direction into a sign (+1/-1) used to
// flip skyline heights.
pub trait Direction {
    fn direction_multiplier(_: Option<Self>) -> f64;
}

// Zero-sized direction markers.
#[deriving(Show)]
pub struct Up;
#[deriving(Show)]
pub struct Down;
#[deriving(Show)]
pub struct Left;
#[deriving(Show)]
pub struct Right;

impl Direction for Up { fn direction_multiplier(_: Option<Up>) -> f64 { 1.0 } }
impl Direction for Down { fn direction_multiplier(_: Option<Down>) -> f64 { -1.0 } }
impl Direction for Left { fn direction_multiplier(_: Option<Left>) -> f64 { -1.0 } }
impl Direction for Right { fn direction_multiplier(_: Option<Right>) -> f64 { 1.0 } }
/// A skyline: buildings ordered left-to-right covering the whole x axis
/// (the final building always ends at infinity).
#[deriving(Clone, Show)]
pub struct Skyline<T: Direction> {
    buildings: Vec<Building>
}
impl<T: Direction> Skyline<T> {
    /// Skyline with no real buildings: one empty span over the whole axis.
    pub fn empty() -> Box<Skyline<T>> {
        box Skyline {
            buildings: vec![Building::empty(Float::infinity())]
        }
    }

    /// Skyline made of the single segment (x1, y1)-(x2, y2), padded with
    /// empty spans on both sides.
    pub fn single(x1: f64, y1: f64, x2: f64, y2: f64) -> Box<Skyline<T>> {
        // Heights are stored in the internal orientation; the direction
        // marker supplies the sign.
        let mult = Direction::direction_multiplier(None::<T>);
        let b = Building::from_points(x1, y1 * mult, x2, y2 * mult);
        let start = Building::empty(x1.min(x2));
        let end = Building::empty(Float::infinity());
        box Skyline {
            buildings: vec![start, b, end]
        }
    }

    // Test helper: wrap a prebuilt building list without validation.
    #[cfg(test)]
    fn from_buildings(bldgs: Vec<Building>) -> Box<Skyline<T>> {
        box Skyline {
            buildings: bldgs
        }
    }

    /// Maximum of `self(x) + other(x)` over all x, walking both building
    /// lists in lockstep. Each span's sum is linear, so only the span
    /// endpoints (`start` and `end`) need to be sampled.
    /// The `Flip` bound restricts this to skylines facing each other.
    pub fn overlap<S: Flip<T>>(&self, other: &Skyline<S>) -> f64 {
        let mut dist: f64 = Float::neg_infinity();
        let mut start: f64 = Float::neg_infinity();
        let mut i = 0u;
        let mut j = 0u;
        let imax = self.buildings.len();
        let jmax = other.buildings.len();
        while i < imax && j < jmax {
            // Loop invariant: b1 and b2 start at or after `start`.
            let b1 = self.buildings[i];
            let b2 = other.buildings[j];
            let end: f64;
            // Advance past whichever building ends first.
            if b1.end < b2.end {
                end = b1.end;
                i += 1;
            } else {
                end = b2.end;
                j += 1;
            }
            dist = dist.max(b1.y(start) + b2.y(start));
            dist = dist.max(b1.y(end) + b2.y(end));
            start = end;
        }
        dist
    }

    /// Starting at `bldgs[*idx]`, returns the x at which `b` stops
    /// concealing the other buildings: either a crossing point, `b`'s own
    /// end, or infinity if nothing stops it. Advances `*idx` past
    /// buildings that end before `b` does.
    fn first_intersection(b: &Building,
                          bldgs: &[Building],
                          mut start: f64,
                          idx: &mut uint) -> f64 {
        let idxmax = bldgs.len();
        while *idx < idxmax {
            let other = &bldgs[*idx];
            let intersect = b.intersection(other);
            if b.conceals_with_intersect(other, start, intersect) {
                if intersect > start && intersect < b.end.min(other.end) {
                    // This building intersects with the other one.
                    return intersect;
                } else if b.end < other.end {
                    // This building ends before the other one.
                    return b.end;
                } else {
                    // The other building ends first (or they end together).
                    *idx += 1;
                    start = other.end;
                }
            } else {
                return start;
            }
        }
        return Float::infinity();
    }

    /// Writes the pointwise maximum of the two skylines into `out`,
    /// emitting chunks of whichever building currently conceals the
    /// other, chopped where dominance changes.
    fn internal_merge(in1: &[Building],
                      in2: &[Building],
                      out: &mut Vec<Building>) {
        let mut start: f64 = Float::neg_infinity();
        let mut i = 0u;
        let mut j = 0u;
        let imax = in1.len();
        let jmax = in2.len();
        // Loop invariant: if j == jmax then i == imax-1.
        while i < imax && j < jmax {
            let b1 = &in1[i];
            let b2 = &in2[j];
            if b1.conceals(b2, start) {
                start = Skyline::<T>::first_intersection(b1, in2, start, &mut j);
                out.push(b1.chop(start));
                // If i == imax-1 then b1.end == inf. If in addition,
                // start >= b1.end then we must have j == jmax-1
                // (i.e., we're done with with input skylines).
                if start >= b1.end {
                    i += 1;
                }
            } else {
                start = Skyline::<T>::first_intersection(b2, in1, start, &mut i);
                out.push(b2.chop(start));
                if start >= b2.end {
                    j += 1;
                }
            }
        }
    }

    /// Replaces `self` with the pointwise maximum of `self` and `other`.
    pub fn merge(&mut self, other: &Skyline<T>) {
        let mut new_bldgs = Vec::new();
        Skyline::<T>::internal_merge(self.buildings.as_slice(),
                                     other.buildings.as_slice(),
                                     &mut new_bldgs);
        self.buildings = new_bldgs;
    }

    /// Translates the skyline horizontally by `x`.
    // NOTE(review): only `end` is shifted; this is only a pure horizontal
    // translation for flat (m == 0) buildings — confirm intended use.
    pub fn slide(&mut self, x: f64) {
        for b in self.buildings.iter_mut() {
            b.end += x
        }
    }

    /// Translates the skyline vertically by `y` (in user orientation).
    pub fn bump(&mut self, y: f64) {
        let y = y * Direction::direction_multiplier(None::<T>);
        for b in self.buildings.iter_mut() {
            b.b += y
        }
    }
}
#[cfg(test)]
mod test {
    use test_utils::ApproxEq;
    // NOTE(review): `mod test_utils;` inside `mod test` expects a
    // test_utils.rs submodule next to this file — confirm layout.
    mod test_utils;

    // Element-wise approximate equality for buildings.
    impl<'a> ApproxEq for &'a Building {
        fn approx_eq<'b>(self, other: &'b Building) -> bool {
            self.m.approx_eq(other.m) &&
            self.b.approx_eq(other.b) &&
            self.end.approx_eq(other.end)
        }
    }

    // Skylines are approx-equal when their building lists match pairwise.
    impl<'a, T: Direction> ApproxEq for &'a Skyline<T> {
        fn approx_eq<'b>(self, other: &'b Skyline<T>) -> bool {
            if self.buildings.len() != other.buildings.len() {
                return false;
            }
            for i in range(0, self.buildings.len()) {
                if !self.buildings[i].approx_eq(&other.buildings[i]) {
                    return false;
                }
            }
            return true;
        }
    }

    // Merging two disjoint flat segments keeps both, in either order.
    #[test]
    fn basic_skyline_merge() {
        let mut sky1 = Skyline::<Up>::single(-2.0, 0.0, -1.0, 0.0);
        let mut sky2 = Skyline::<Up>::single(1.0, 0.0, 2.0, 0.0);
        sky2.merge(&*sky1);
        let target = Skyline::from_buildings(
            vec!(Building::empty(-2.0),
                 Building { m: 0.0, b: 0.0, end: -1.0 },
                 Building::empty(1.0),
                 Building { m: 0.0, b: 0.0, end: 2.0 },
                 Building::empty(Float::infinity())));
        assert!(sky2.approx_eq(&*target));
        sky1.merge(&*sky2);
        assert!(sky1.approx_eq(&*target));
    }

    // Facing flat skylines at heights 3 and -2 overlap by 3 + (-2) = 1.
    #[test]
    fn basic_skyline_overlap() {
        let sky1 = Skyline::<Up>::single(-1.0, 3.0, 1.0, 3.0);
        let sky2 = Skyline::<Down>::single(-1.0, 2.0, 1.0, 2.0);
        let d = sky1.overlap(&*sky2);
        assert!(d.approx_eq(1.0), "d = {}, should be 1.0", d);
    }
    // TODO: once compilefail tests are available, add some to make
    // sure we can't compare skylines with different directions.
    // TODO: test slide and bump
}
| true |
fcb01a63ca183f09223a866c6536afe442a45c51
|
Rust
|
yoav-steinberg/jsonpath
|
/tests/filter.rs
|
UTF-8
| 6,059 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use]
extern crate serde_json;
use common::{read_json, select_and_then_compare, setup};
mod common;
#[test]
fn quote() {
    // Escaped single and double quotes inside bracketed member names.
    setup();
    select_and_then_compare(
        r#"$['single\'quote']"#,
        json!({"single'quote":"value"}),
        json!(["value"]),
    );
    select_and_then_compare(
        r#"$["double\"quote"]"#,
        json!({"double\"quote":"value"}),
        json!(["value"]),
    );
}
#[test]
fn filter_next_all() {
    // `$.*` and `$[*]` are equivalent: all direct children, unchanged.
    setup();
    for path in &[r#"$.*"#, r#"$[*]"#] {
        select_and_then_compare(
            path,
            json!(["string", 42, { "key": "value" }, [0, 1]]),
            json!(["string", 42, { "key": "value" }, [0, 1]]),
        );
    }
}
#[test]
fn filter_all() {
    // `$..*` / `$..[*]` descend recursively: children first, then
    // the values nested inside them.
    setup();
    for path in &[r#"$..*"#, r#"$..[*]"#] {
        select_and_then_compare(
            path,
            json!(["string", 42, { "key": "value" }, [0, 1]]),
            json!([ "string", 42, { "key" : "value" }, [ 0, 1 ], "value", 0, 1 ]),
        );
    }
}
#[test]
fn filter_array_next_all() {
    // Two wildcard levels in any dot/bracket combination select
    // grandchildren only.
    setup();
    for path in &[r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] {
        select_and_then_compare(
            path,
            json!(["string", 42, { "key": "value" }, [0, 1]]),
            json!(["value", 0, 1]),
        );
    }
}
#[test]
fn filter_all_complex() {
    // Recursive descent into `friends` followed by a wildcard, against
    // the benchmark fixture file.
    setup();
    for path in &[r#"$..friends.*"#, r#"$[*].friends.*"#] {
        select_and_then_compare(
            path,
            read_json("./benchmark/data_array.json"),
            json!([
                { "id" : 0, "name" : "Millicent Norman" },
                { "id" : 1, "name" : "Vincent Cannon" },
                { "id" : 2, "name" : "Gray Berry" },
                { "id" : 0, "name" : "Tillman Mckay" },
                { "id" : 1, "name" : "Rivera Berg" },
                { "id" : 2, "name" : "Rosetta Erickson" }
            ]),
        );
    }
}
#[test]
fn filter_parent_with_matched_child() {
    // A filter on a nested path returns the parent object that matched,
    // not the nested value.
    setup();
    select_and_then_compare(
        "$.a[?(@.b.c == 1)]",
        json!({
            "a": {
                "b": {
                    "c": 1
                }
            }
        }),
        json!([
            {
                "b" : {
                    "c" : 1
                }
            }
        ]),
    );
}
#[test]
fn filter_parent_exist_child() {
    // Existence filter (no comparison): matches when the path is present.
    setup();
    select_and_then_compare(
        "$.a[?(@.b.c)]",
        json!({
            "a": {
                "b": {
                    "c": 1
                }
            }
        }),
        json!([
            {
                "b" : {
                    "c" : 1
                }
            }
        ]),
    );
}
#[test]
fn filter_parent_paths() {
    // Filtering array elements by a nested string comparison; non-object
    // and non-matching elements are dropped.
    setup();
    select_and_then_compare(
        "$[?(@.key.subKey == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
    );
}
// Regression for issue #33: existence filter under recursive descent.
#[test]
fn bugs33_exist_in_all() {
    setup();
    select_and_then_compare(
        "$..[?(@.first.second)]",
        json!({
            "foo": {
                "first": { "second": "value" }
            },
            "foo2": {
                "first": {}
            },
            "foo3": {
            }
        }),
        json!([
            {
                "first": {
                    "second": "value"
                }
            }
        ]),
    );
}
// Regression for issue #33: existence term on the left of `&&`.
#[test]
fn bugs33_exist_left_in_all_with_and_condition() {
    setup();
    select_and_then_compare(
        "$..[?(@.first && @.first.second)]",
        json!({
            "foo": {
                "first": { "second": "value" }
            },
            "foo2": {
                "first": {}
            },
            "foo3": {
            }
        }),
        json!([
            {
                "first": {
                    "second": "value"
                }
            }
        ]),
    );
}
// Regression for issue #33: deep existence term on the right of `&&`.
#[test]
fn bugs33_exist_right_in_all_with_and_condition() {
    setup();
    select_and_then_compare(
        "$..[?(@.b.c.d && @.b)]",
        json!({
            "a": {
                "b": {
                    "c": {
                        "d" : {
                            "e" : 1
                        }
                    }
                }
            }
        }),
        json!([
            {
                "b" : {
                    "c" : {
                        "d" : {
                            "e" : 1
                        }
                    }
                }
            }
        ]),
    );
}
// Regression for issue #38: bracket (array) notation inside filters
// must behave identically to dot notation.
#[test]
fn bugs38_array_notation_in_filter() {
    setup();
    // Numeric comparison must match exactly (42.0001 / 41.9999 excluded).
    select_and_then_compare(
        "$[?(@['key']==42)]",
        json!([
            {"key": 0},
            {"key": 42},
            {"key": -1},
            {"key": 41},
            {"key": 43},
            {"key": 42.0001},
            {"key": 41.9999},
            {"key": 100},
            {"some": "value"}
        ]),
        json!([{"key": 42}]),
    );
    // Bracket then dot.
    select_and_then_compare(
        "$[?(@['key'].subKey == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
    );
    // Bracket then bracket.
    select_and_then_compare(
        "$[?(@['key']['subKey'] == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"key": {"seq": 2, "subKey": "subKey2"}}]),
    );
    // Recursive descent combined with a bracketed filter; returns the
    // matching `key` value itself here, not its parent.
    select_and_then_compare(
        "$..key[?(@['subKey'] == 'subKey2')]",
        json!([
            {"key": {"seq": 1, "subKey": "subKey1"}},
            {"key": {"seq": 2, "subKey": "subKey2"}},
            {"key": 42},
            {"some": "value"}
        ]),
        json!([{"seq": 2, "subKey": "subKey2"}]),
    );
}
| true |
582843f9fd9e5b818bade37d0bf1ab284e8d4432
|
Rust
|
JiahaiHu/kv-server
|
/src/store/mod.rs
|
UTF-8
| 417 | 2.71875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
pub mod engine;
// Keys and values are plain strings for this store.
pub type Key = String;
pub type Value = String;

/// Storage-engine abstraction implemented by the engines in the
/// `engine` submodule. All operations return `Err(())` on failure.
pub trait Engine {
    /// Value stored under `key`, or `None` if absent.
    fn get(&self, key: &Key) -> Result<Option<Value>, ()>;
    /// Stores `value` under `key`; returns the previous value, if any.
    fn put(&mut self, key: &Key, value: &Value) -> Result<Option<Value>, ()>;
    /// Removes `key`; returns the removed value, if any.
    fn delete(&mut self, key: &Key) -> Result<Option<Value>, ()>;
    /// Entries whose keys lie in the range key_start..key_end —
    /// presumably half-open; TODO confirm inclusivity with implementors.
    fn scan(&self, key_start: &Key, key_end: &Key) -> Result<Option<HashMap<Key, Value>>, ()>;
}
| true |
f3729e4c24aac1e3e0c3c89a1e7c07b2a18a1a5c
|
Rust
|
Lakelezz/white_rabbit
|
/src/lib.rs
|
UTF-8
| 10,550 | 3.53125 | 4 |
[
"ISC"
] |
permissive
|
//! *“I'm late! I'm late! For a very important date!”*
//! *by “The White Rabbit”* 『Alice's Adventures in Wonderland』
//!
//! `white_rabbit` schedules your tasks and can repeat them!
//!
//! One funny use case are chat bot commands: Imagine a *remind me*-command,
//! the command gets executed and you simply create a one-time job to be
//! scheduled for whatever time the user desires.
//!
//! We are using chrono's `DateTime<Utc>`, enabling you to serialise and thus
//! backup currently running tasks,
//! in case you want to shutdown/restart your application,
//! constructing a new scheduler is doable.
//! However, please make sure your internal clock is synced.
#![deny(rust_2018_idioms)]
use chrono::Duration as ChronoDuration;
use parking_lot::{Condvar, Mutex, RwLock};
use std::{cmp::Ordering, collections::BinaryHeap, sync::Arc, time::Duration as StdDuration};
use threadpool::ThreadPool;
pub use chrono::{DateTime, Duration, Utc};
/// Compare if an `enum`-variant matches another variant.
macro_rules! cmp_variant {
    // Expands to a match yielding `true` iff `$expression` matches the
    // given variant pattern (a local precursor of `matches!`).
    ($expression:expr, $($variant:tt)+) => {
        match $expression {
            $($variant)+ => true,
            _ => false
        }
    }
}
/// When a task is due, this will be passed to the task.
/// Currently, there is not much use to this. However, this might be extended
/// in the future.
pub struct Context {
    // The time this task is scheduled for.
    time: DateTime<Utc>,
}
/// Every task will return this `enum`.
// Returned by a job to tell the scheduler whether to re-queue it.
pub enum DateResult {
    /// The task is considered finished and can be fully removed.
    Done,
    /// The task will be scheduled for a new date on passed `DateTime<Utc>`.
    Repeat(DateTime<Utc>),
}
/// Every job gets a planned `Date` with the scheduler.
pub struct Date {
    // When (and with what) the job runs.
    pub context: Context,
    // The work itself; may request rescheduling via its `DateResult`.
    pub job: Box<dyn FnMut(&mut Context) -> DateResult + Send + Sync + 'static>,
}
impl Eq for Date {}
/// Invert comparisions to create a min-heap.
impl Ord for Date {
    /// Orders by scheduled time, reversed, so `BinaryHeap` (a max-heap)
    /// yields the *earliest* `Date` first (min-heap behaviour).
    fn cmp(&self, other: &Date) -> Ordering {
        // `.reverse()` is exactly the manual Less/Greater swap the
        // original spelled out (Equal stays Equal).
        self.context.time.cmp(&other.context.time).reverse()
    }
}
/// Invert comparisions to create a min-heap.
impl PartialOrd for Date {
    /// Delegates to `Ord::cmp` so the two orderings can never diverge
    /// (the original duplicated the inverted comparison by hand).
    fn partial_cmp(&self, other: &Date) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for Date {
    // Equality considers only the scheduled time, consistent with Ord.
    fn eq(&self, other: &Date) -> bool {
        self.context.time == other.context.time
    }
}
/// The [`Scheduler`]'s worker thread switches through different states
/// while running, each state changes the behaviour.
///
/// [`Scheduler`]: struct.Scheduler.html
// State machine driving the worker thread's wait behaviour.
enum SchedulerState {
    /// No dates being awaited, sleep until one gets added.
    PauseEmpty,
    /// Pause until next date is due.
    PauseTime(StdDuration),
    /// If the next date is already waiting to be executed,
    /// the thread continues running without sleeping.
    Run,
    /// Exits the thread.
    Exit,
}
impl SchedulerState {
fn is_running(&self) -> bool {
cmp_variant!(*self, SchedulerState::Run)
}
fn new_pause_time(duration: ChronoDuration) -> Self {
SchedulerState::PauseTime(
duration
.to_std()
.unwrap_or_else(|_| StdDuration::from_millis(0)),
)
}
}
/// This scheduler exists on two levels: The handle, granting you the
/// ability of adding new tasks, and the executor, dating and executing these
/// tasks when specified time is met.
///
/// **Info**: This scheduler may not be precise due to anomalies such as
/// preemption or platform differences.
pub struct Scheduler {
    /// The mean of communication with the running scheduler.
    /// (state + condvar shared with the worker thread).
    condvar: Arc<(Mutex<SchedulerState>, Condvar)>,
    /// Every job has its date listed inside this.
    /// Min-heap by time thanks to `Date`'s reversed `Ord`.
    dates: Arc<RwLock<BinaryHeap<Date>>>,
}
impl Scheduler {
    /// Add a task to be executed when `time` is reached.
    pub fn add_task_datetime<T>(&mut self, time: DateTime<Utc>, to_execute: T)
    where
        T: FnMut(&mut Context) -> DateResult + Send + Sync + 'static,
    {
        let &(ref state_lock, ref notifier) = &*self.condvar;
        let task = Date {
            context: Context { time },
            job: Box::new(to_execute),
        };
        let mut locked_heap = self.dates.write();
        if locked_heap.is_empty() {
            // First task: re-arm the worker's pause with this task's wait,
            // unless it is already running.
            let mut scheduler_state = state_lock.lock();
            let left = task.context.time.signed_duration_since(Utc::now());
            if !scheduler_state.is_running() {
                *scheduler_state = SchedulerState::new_pause_time(left);
                notifier.notify_one();
            }
        } else {
            let mut scheduler_state = state_lock.lock();
            // Only shorten the timer if the new task is due before the
            // current earliest one; otherwise the existing pause stands.
            if let SchedulerState::PauseTime(_) = *scheduler_state {
                let peeked = locked_heap.peek().expect("Expected heap to be filled.");
                if task.context.time < peeked.context.time {
                    let left = task.context.time.signed_duration_since(Utc::now());
                    if !scheduler_state.is_running() {
                        // Negative waits (task already due) clamp to zero.
                        *scheduler_state = SchedulerState::PauseTime(
                            left.to_std()
                                .unwrap_or_else(|_| StdDuration::from_millis(0)),
                        );
                        notifier.notify_one();
                    }
                }
            }
        }
        // Push while still holding the heap write lock taken above.
        locked_heap.push(task);
    }

    /// Add a task to be executed `how_long` from now.
    pub fn add_task_duration<T>(&mut self, how_long: ChronoDuration, to_execute: T)
    where
        T: FnMut(&mut Context) -> DateResult + Send + Sync + 'static,
    {
        let time = Utc::now() + how_long;
        self.add_task_datetime(time, to_execute);
    }
}
/// Overwrites the scheduler state guarded by `state_lock` with `to_set`.
fn set_state_lock(state_lock: &Mutex<SchedulerState>, to_set: SchedulerState) {
    *state_lock.lock() = to_set;
}
#[inline]
// Caller holds the relevant locks; just enqueue and wake the worker.
// (Name typo "notfiy" kept — it is referenced by push_and_notfiy below.)
fn _push_and_notfiy(date: Date, heap: &mut BinaryHeap<Date>, notifier: &Condvar) {
    heap.push(date);
    notifier.notify_one();
}
/// This function pushes a `date` onto `data_pooled` and notifies the
/// dispatching-thread in case they are sleeping.
#[inline]
fn push_and_notfiy(
    dispatcher_pair: &Arc<(Mutex<SchedulerState>, Condvar)>,
    data_pooled: &Arc<RwLock<BinaryHeap<Date>>>,
    when: &DateTime<Utc>,
    date: Date,
) {
    let &(ref state_lock, ref notifier) = &**dispatcher_pair;
    let mut state = state_lock.lock();
    let mut heap_lock = data_pooled.write();

    // Re-arm the worker for whichever event comes first: the earliest
    // already-scheduled date, or the date being inserted now.
    let wake_at = match heap_lock.peek() {
        Some(peek) if peek.context.time < *when => peek.context.time,
        _ => *when,
    };
    *state = SchedulerState::new_pause_time(wake_at.signed_duration_since(Utc::now()));

    _push_and_notfiy(date, &mut heap_lock, notifier);
}
// Tells the worker loop whether to exit (`Yes`) or keep going (`No`).
#[must_use]
enum Break {
    Yes,
    No,
}
#[inline]
// Blocks according to the current state; returns Break::Yes when the
// worker should shut down.
fn process_states(state_lock: &Mutex<SchedulerState>, notifier: &Condvar) -> Break {
    let mut scheduler_state = state_lock.lock();
    // Nothing scheduled: sleep until a task is added.
    while let SchedulerState::PauseEmpty = *scheduler_state {
        notifier.wait(&mut scheduler_state);
    }
    // Timed pause: sleep until the wait elapses; a notification may
    // replace the state with a shorter pause, which loops again.
    while let SchedulerState::PauseTime(duration) = *scheduler_state {
        if notifier
            .wait_for(&mut scheduler_state, duration)
            .timed_out()
        {
            break;
        }
    }
    // Set by Scheduler's Drop impl when the handle is dropped.
    if let SchedulerState::Exit = *scheduler_state {
        return Break::Yes;
    }
    Break::No
}
// Pops the earliest date and runs its job on the thread pool; a job
// returning Repeat is re-queued for the new time.
fn dispatch_date(
    threadpool: &ThreadPool,
    dates: &Arc<RwLock<BinaryHeap<Date>>>,
    pair_scheduler: &Arc<(Mutex<SchedulerState>, Condvar)>,
) {
    // Pop inside a short write-lock scope.
    let mut date = {
        let mut dates = dates.write();
        dates.pop().expect("Should not run on empty heap.")
    };
    let date_dispatcher = dates.clone();
    let dispatcher_pair = pair_scheduler.clone();
    threadpool.execute(move || {
        if let DateResult::Repeat(when) = (date.job)(&mut date.context) {
            date.context.time = when;
            push_and_notfiy(&dispatcher_pair, &date_dispatcher, &when, date);
        }
    });
}
// After dispatching, pick the worker's next state from the heap: pause
// until the next date, Run if it is already due, PauseEmpty if empty.
fn check_peeking_date(dates: &Arc<RwLock<BinaryHeap<Date>>>, state_lock: &Mutex<SchedulerState>) {
    if let Some(next) = dates.read().peek() {
        let now = Utc::now();
        if next.context.time > now {
            let left = next.context.time.signed_duration_since(now);
            set_state_lock(&state_lock, SchedulerState::new_pause_time(left));
        } else {
            set_state_lock(&state_lock, SchedulerState::Run);
        }
    } else {
        set_state_lock(&state_lock, SchedulerState::PauseEmpty);
    }
}
impl Scheduler {
    /// Creates a new [`Scheduler`] which will use `thread_count` number of
    /// threads when tasks are being dispatched/dated.
    ///
    /// [`Scheduler`]: struct.Scheduler.html
    pub fn new(thread_count: usize) -> Self {
        // State + condvar shared between this handle and the worker thread.
        let pair = Arc::new((Mutex::new(SchedulerState::PauseEmpty), Condvar::new()));
        let pair_scheduler = pair.clone();
        let dates: Arc<RwLock<BinaryHeap<Date>>> = Arc::new(RwLock::new(BinaryHeap::new()));
        let dates_scheduler = Arc::clone(&dates);
        // Worker loop: wait per state, dispatch the due date, recompute
        // the next state; exits when Drop sets SchedulerState::Exit.
        std::thread::spawn(move || {
            let &(ref state_lock, ref notifier) = &*pair_scheduler;
            let threadpool = ThreadPool::new(thread_count);
            loop {
                if let Break::Yes = process_states(&state_lock, &notifier) {
                    break;
                }
                dispatch_date(&threadpool, &dates_scheduler, &pair_scheduler);
                check_peeking_date(&dates_scheduler, &state_lock);
            }
        });
        Scheduler {
            condvar: pair,
            dates,
        }
    }
}
/// Once the scheduler is dropped, we also need to join and finish the thread.
// Dropping the handle tells the worker thread to exit.
// (The original `impl<'a>` declared an unconstrained lifetime parameter,
// which is rejected/linted by modern rustc; `Scheduler` has no lifetime.)
impl Drop for Scheduler {
    fn drop(&mut self) {
        let &(ref state_lock, ref notifier) = &*self.condvar;
        let mut state = state_lock.lock();
        *state = SchedulerState::Exit;
        // Wake the worker so it observes Exit immediately.
        notifier.notify_one();
    }
}
| true |
faa3c8a3555c2ab38c745576140f3d8811832fd9
|
Rust
|
neont21/pjos-rust-programming
|
/chapter11/assert_format/src/lib.rs
|
UTF-8
| 341 | 3.390625 | 3 |
[
"Apache-2.0"
] |
permissive
|
/// Returns a greeting that includes `name`.
///
/// The previous implementation returned a fixed `"Hello?"` and ignored
/// its argument (also triggering an unused-variable warning), which the
/// sibling test `greeting_contains_name` rejects.
pub fn greeting(name: &str) -> String {
    format!("Hello, {}!", name)
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn greeting_contains_name() {
        // The greeting must mention the name it was given; on failure
        // the custom message prints the actual returned value.
        let result = greeting("Peter");
        assert!(result.contains("Peter"),
            "Greeting did not contain name, value was '{}'",
            result
        );
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.