blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
5e7ed81c623de38bcfd314a86ef0baa120b8e1f6
|
Rust
|
knsd/tvdb-rs
|
/tests/lib.rs
|
UTF-8
| 959 | 2.734375 | 3 |
[
"Unlicense"
] |
permissive
|
extern crate tvdb;
use tvdb::{Tvdb, EpisodeId};
const APIKEY: &'static str = "0629B785CE550C8D";
#[test]
fn search() {
    // Live-API test: searches TheTVDB for "scrubs" and checks the top hit.
    let api = Tvdb::new(APIKEY.to_owned());
    let sr = api.search("scrubs", "en");
    // assert_eq! reports both sides on failure, unlike assert!(a == b).
    assert_eq!(sr.ok().unwrap()[0].seriesname, "Scrubs");
}
#[test]
fn nonexist() {
    // A garbage series name must produce a search error, not a hit.
    let api = Tvdb::new(APIKEY);
    let result = api.search("ladlkgdklfgsdfglk", "en");
    assert!(result.is_err());
}
#[test]
fn lookup_by_epid(){
    // Looks up Scrubs S01E02 via an explicit EpisodeId.
    let api = Tvdb::new(APIKEY);
    let ep = api.episode(EpisodeId::new(76156, "en"), 1, 2).ok().unwrap();
    // assert_eq! reports both sides on failure, unlike assert!(a == b).
    assert_eq!(ep.episode_name, "My Mentor");
}
#[test]
fn lookup_by_u32(){
    // Same lookup as `lookup_by_epid`, but passing the raw series id.
    let api = Tvdb::new(APIKEY);
    let ep = api.episode(76156, 1, 2).ok().unwrap();
    // assert_eq! reports both sides on failure, unlike assert!(a == b).
    assert_eq!(ep.episode_name, "My Mentor");
}
#[test]
fn epinfo_default(){
    // Episode lookup driven by a search result instead of an explicit id.
    let api = Tvdb::new(APIKEY);
    let sr = api.search("scrubs", "en").ok().unwrap();
    let ep = api.episode(&sr[0], 1, 2).ok().unwrap();
    // assert_eq! reports both sides on failure, unlike assert!(a == b).
    assert_eq!(ep.episode_name, "My Mentor");
}
| true |
49f16e77dd64481b9e1583a956ed675c461f3b88
|
Rust
|
bobmoretti/flt2vhs
|
/patch-bms-novhs/src/main.rs
|
UTF-8
| 7,701 | 2.859375 | 3 |
[
"Zlib"
] |
permissive
|
use std::fs;
use std::path::{Path, PathBuf};
use anyhow::*;
use log::*;
use structopt::StructOpt;
/// Patch BMS to not convert FLT to VHS files when leaving 3D
#[derive(Debug, StructOpt)]
#[structopt(verbatim_doc_comment)]
struct Args {
    /// Verbosity (-v, -vv, -vvv, etc.)
    #[structopt(short, long, parse(from_occurrences))]
    verbose: u8,

    /// When to colorize log output.
    #[structopt(short, long, case_insensitive = true, default_value = "auto")]
    #[structopt(name = "always/auto/never")]
    color: logsetup::Color,

    /// Restore BMS to its original state
    /// (convert VHS files when leaving 3D)
    #[structopt(short, long, verbatim_doc_comment)]
    restore: bool,

    /// The BMS executable to patch.
    /// If unspecified, will check the registry for the BMS path.
    #[structopt(name = "Falcon BMS.exe", verbatim_doc_comment)]
    input: Option<PathBuf>,
}
/// BMS executable versions this tool knows patch offsets for.
enum BmsExeVersion {
    Ver4_35_1,
    Ver4_35_2,
}
/// A single byte-patch: at `offset` in the EXE, `original` bytes are
/// swapped for `replacement` (or back again when restoring).
struct Patch {
    // File offset of the bytes to modify.
    offset: usize,
    // Bytes expected at `offset` in an unpatched EXE.
    original: &'static [u8],
    // Bytes written in place of `original` (same length).
    replacement: &'static [u8],
}
/// Entry point: log any failure from `run` and exit with a non-zero code.
fn main() {
    if let Err(e) = run() {
        error!("{:?}", e);
        std::process::exit(1);
    }
}
/// Locates Falcon BMS.exe, detects its version, and applies (or restores)
/// the byte patches that disable FLT -> VHS conversion.
fn run() -> Result<()> {
    let args = Args::from_args();
    logsetup::init_logger(std::cmp::max(1, args.verbose), false, args.color);
    let bms_path = find_bms(args.input)?;
    let mut map = open_bms(&bms_path)?;
    let bms_version = find_bms_version(&map)?;
    // Five-byte no-op with which the five-byte CALL instructions are overwritten.
    const REPLACEMENT_NOP: &[u8] = &[0x0F, 0x1F, 0x44, 0x00, 0x00]; // lea eax, eax * 1 + 0
    // NOTE(review): offsets and original bytes below are version-specific and
    // presumably taken from the shipped binaries — verify against the matching
    // EXE before changing anything here.
    let patches_435u1 = vec![
        Patch {
            offset: 0x0002_2544,
            original: &[0xE8, 0x87, 0x55, 0x00, 0x00],
            replacement: REPLACEMENT_NOP,
        },
        Patch {
            offset: 0x004D_CF68,
            original: &[0xE8, 0x63, 0xAB, 0xB4, 0xFF],
            replacement: REPLACEMENT_NOP,
        },
    ];
    let patches_435u2 = vec![
        Patch {
            offset: 0x0001_CA12,
            original: &[0xE8, 0x99, 0x55, 0x00, 0x00],
            replacement: REPLACEMENT_NOP,
        },
        Patch {
            offset: 0x004F_B5C8,
            original: &[0xE8, 0xE3, 0x69, 0xB2, 0xFF],
            replacement: REPLACEMENT_NOP,
        },
    ];
    // Pick the patch set that matches the detected version.
    let patches = match bms_version {
        BmsExeVersion::Ver4_35_1 => patches_435u1,
        BmsExeVersion::Ver4_35_2 => patches_435u2,
    };
    for patch in &patches {
        patch_call(&mut map, patch, args.restore)?;
    }
    // Flush the memory map so the changes land on disk.
    map.flush()
        .context("Couldn't save changes to Falcon BMS.exe")?;
    if args.restore {
        info!("BMS restored to its original state")
    } else {
        info!("FLT -> VHS conversion removed");
    }
    Ok(())
}
/// Applies (or, with `restore`, reverts) a single `Patch` to the mapped EXE.
///
/// Checks the bytes currently at the patch offset and bails if they match
/// neither the original nor the replacement.
fn patch_call(map: &mut [u8], patch: &Patch, restore: bool) -> Result<()> {
    assert_eq!(patch.original.len(), patch.replacement.len());
    let patch_len = patch.original.len();

    // Bounds-check *before* slicing: indexing a too-short map would panic,
    // which made the previous post-slice length check unreachable.
    ensure!(
        patch.offset + patch_len <= map.len(),
        "EXE is too short - are you sure this is a supported BMS version?"
    );
    let call_to_nop = &mut map[patch.offset..patch.offset + patch_len];

    #[allow(clippy::collapsible_else_if)]
    if restore {
        if call_to_nop == patch.original {
            debug!(
                "ACMI_ImportFile call at {:08X} is unmodified; nothing to do!",
                patch.offset
            );
        } else if call_to_nop == patch.replacement {
            debug!("Restoring call to ACMI_ImportFile at {:08X}", patch.offset);
            call_to_nop.copy_from_slice(patch.original);
        } else {
            // Format specs now match the non-restore branch below.
            bail!(
                "Unexpected bytes at {:08X}: {:02X?}",
                patch.offset,
                call_to_nop
            );
        }
    } else {
        if call_to_nop == patch.original {
            debug!(
                "Replacing call to ACMI_ImportFile at {:08X} with no-op",
                patch.offset
            );
            call_to_nop.copy_from_slice(patch.replacement);
        } else if call_to_nop == patch.replacement {
            debug!(
                "ACMI_ImportFile call at {:08X} is already no-op'd; nothing to do!",
                patch.offset
            );
        } else {
            bail!(
                "Unexpected bytes at {:08X}: {:02X?}",
                patch.offset,
                call_to_nop
            );
        }
    }
    Ok(())
}
/// Resolves the path to Falcon BMS.exe: explicit argument first, then the
/// registry (Windows only), then a relative guess from BMS/User/Acmi.
fn find_bms(input: Option<PathBuf>) -> Result<PathBuf> {
    if let Some(path) = input {
        debug!("User gave {} as the Falcon BMS.exe path", path.display());
        return Ok(path);
    }

    // Check the registry if we can.
    #[cfg(windows)]
    {
        match find_bms_from_registry() {
            Ok(found) => return Ok(found),
            Err(e) => warn!("Couldn't find BMS from registry: {:?}", e),
        }
    }

    debug!("Last try: Assuming we're in BMS/User/Acmi. Let's look in BMS/Bin...");
    let fallback = Path::new("../../Bin/x64/Falcon BMS.exe");
    if !fallback.exists() {
        bail!("Couldn't find Falcon BMS.exe");
    }
    Ok(fallback.to_owned())
}
#[cfg(windows)]
/// Looks up the BMS install directory from the Windows registry and appends
/// the relative path to the 64-bit executable.
fn find_bms_from_registry() -> Result<PathBuf> {
    use registry::*;
    debug!("Looking for Falcon BMS.exe in the registry");
    // 32-bit registry view (WOW6432Node) — where the BMS installer writes its key.
    let key = Hive::LocalMachine
        .open(
            r"SOFTWARE\WOW6432Node\Benchmark Sims\Falcon BMS 4.35",
            Security::Read,
        )
        .context("Couldn't find BMS registry key")?;
    match key
        .value("baseDir")
        .context("Couldn't find BMS baseDir registry value")?
    {
        // baseDir is stored as a wide string; anything else is unexpected.
        Data::String(wide) => Ok(Path::new(&wide.to_os_string()).join("Bin/x64/Falcon BMS.exe")),
        _ => bail!("Expected a string for BMS baseDir, got something else"),
    }
}
/// Opens the EXE read-write and memory-maps it so patches can be applied in place.
fn open_bms(bms: &Path) -> Result<memmap::MmapMut> {
    info!("Opening {}", bms.display());
    let fh = fs::OpenOptions::new()
        .read(true)
        .write(true)
        .open(bms)
        .with_context(|| format!("Couldn't open {}", bms.display()))?;
    // SAFETY-note (review): mapping a file is unsafe because another process
    // could truncate/modify it concurrently; acceptable for this tool's use.
    let mapping = unsafe { memmap::MmapMut::map_mut(&fh) }
        .with_context(|| format!("Couldn't memory map {}", bms.display()))?;
    Ok(mapping)
}
fn find_bms_version(map: &[u8]) -> Result<BmsExeVersion> {
use pelite::pe64::{Pe, PeFile};
info!("Determining BMS version.");
let bin = PeFile::from_bytes(map).context("Couldn't load file as an EXE")?;
let resources = bin.resources()?;
let version_info = resources.version_info()?;
// Get the first available language
let lang = version_info.translation()[0];
// Is this BMS?
let product_name = version_info
.value(lang, "ProductName")
.ok_or_else(|| anyhow!("Couldn't get EXE name"))?;
ensure!(
product_name == "Falcon BMS",
"EXE says it's {}, not Falcon BMS",
product_name
);
const SUPPORTED_VERSIONS: &[&str] = &["4.35.1", "4.35.2"];
let version_field = version_info
.value(lang, "ProductVersion")
.ok_or_else(|| anyhow!("Couldn't get EXE version"))?;
let vs = version_field.as_str();
info!("Version detecteed: {}", version_field);
ensure!(
SUPPORTED_VERSIONS.contains(&vs),
"Detected BMS version {} not supported. Supported versions are {:?}.",
vs,
SUPPORTED_VERSIONS
);
let version = match vs {
"4.35.1" => BmsExeVersion::Ver4_35_1,
"4.35.2" => BmsExeVersion::Ver4_35_2,
_ => unreachable!(format!("version that was detected {} is invalid.", vs)),
};
let expected_exe_size = match version {
BmsExeVersion::Ver4_35_1 => 81105920,
BmsExeVersion::Ver4_35_2 => 164310528,
};
ensure!(
map.len() == expected_exe_size,
"EXE isn't the right size - are you sure this is BMS version {}?",
vs
);
Ok(version)
}
| true |
6c6fd09986475122813c50f80d8361bed9b94038
|
Rust
|
dimforge/ncollide
|
/src/shape/convex_polyhedron.rs
|
UTF-8
| 3,017 | 3.203125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::math::{Isometry, Point, Vector};
use crate::shape::{ConvexPolygonalFeature, SupportMap};
use na::{RealField, Unit};
/// An identifier of a feature of a convex polyhedron.
///
/// This identifier is shape-dependent and is such that it
/// allows an efficient retrieval of the geometric information of the
/// feature.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum FeatureId {
    /// Shape-dependent identifier of a vertex.
    Vertex(usize),
    #[cfg(feature = "dim3")]
    /// Shape-dependent identifier of an edge.
    Edge(usize),
    /// Shape-dependent identifier of a face.
    Face(usize),
    // XXX: remove this variant.
    /// Unknown identifier.
    Unknown,
}
impl FeatureId {
    /// Retrieves the value of the identifier if `self` is a vertex.
    ///
    /// # Panics
    /// Panics if `self` is not a `Vertex`.
    pub fn unwrap_vertex(self) -> usize {
        match self {
            FeatureId::Vertex(id) => id,
            _ => panic!("The feature id does not identify a vertex."),
        }
    }
    /// Retrieves the value of the identifier if `self` is an edge.
    ///
    /// # Panics
    /// Panics if `self` is not an `Edge`.
    #[cfg(feature = "dim3")]
    pub fn unwrap_edge(self) -> usize {
        match self {
            FeatureId::Edge(id) => id,
            _ => panic!("The feature id does not identify an edge."),
        }
    }
    /// Retrieves the value of the identifier if `self` is a face.
    ///
    /// # Panics
    /// Panics if `self` is not a `Face`.
    pub fn unwrap_face(self) -> usize {
        match self {
            FeatureId::Face(id) => id,
            _ => panic!("The feature id does not identify a face."),
        }
    }
}
/// Trait implemented by all convex polyhedra.
pub trait ConvexPolyhedron<N: RealField + Copy>: SupportMap<N> {
    /// Gets the specified vertex in the shape local-space.
    fn vertex(&self, id: FeatureId) -> Point<N>;
    /// Fill `face` with the geometric description of the specified face, in the shape's local-space.
    fn face(&self, id: FeatureId, face: &mut ConvexPolygonalFeature<N>);
    #[cfg(feature = "dim3")]
    /// Get the specified edge's vertices (in the shape local-space) and the vertices' identifiers.
    fn edge(&self, id: FeatureId) -> (Point<N>, Point<N>, FeatureId, FeatureId);
    /// Returns any normal from the normal cone of the given feature.
    fn feature_normal(&self, feature: FeatureId) -> Unit<Vector<N>>;
    /// Retrieve the face (in world-space) with a normal that maximizes the scalar product with `dir`.
    fn support_face_toward(
        &self,
        transform: &Isometry<N>,
        dir: &Unit<Vector<N>>,
        out: &mut ConvexPolygonalFeature<N>,
    );
    /// Retrieve the feature (in world-space) whose normal cone contains `dir`.
    fn support_feature_toward(
        &self,
        transform: &Isometry<N>,
        dir: &Unit<Vector<N>>,
        _angle: N,
        out: &mut ConvexPolygonalFeature<N>,
    );
    /// Retrieve the identifier of the feature whose normal cone contains `dir`.
    fn support_feature_id_toward(&self, local_dir: &Unit<Vector<N>>) -> FeatureId;
}
| true |
9511848ad213c5b9ab655be2589752c1c419db6b
|
Rust
|
blu-dev/lua-replace
|
/src/raw_dump.rs
|
UTF-8
| 2,230 | 3.125 | 3 |
[] |
no_license
|
/// Prints the two header lines of the hex dump: the column indices
/// (00..0F split 8+8, then the ASCII column indices) and a separator rule.
fn print_dump_header(ptr: u64, length: usize) {
    // Width of the widest address in the dump, so the columns line up
    // with `print_address` (hoisted: was computed twice).
    let addr_width = format!("{:X}", ptr + (length as u64)).len();
    print!("{:width$} ", " ", width = addr_width);
    for x in 0..8 {
        print!("{:02X} ", x);
    }
    print!(" ");
    for x in 8..0x10 {
        print!("{:02X} ", x);
    }
    print!("│ ");
    for x in 0..0x10 {
        print!("{:X}", x);
    }
    println!();
    print!("{:width$} ", " ", width = addr_width);
    // Unused loop counters renamed to `_` to silence warnings.
    for _ in 0..8 {
        print!("───");
    }
    print!("─");
    for _ in 8..0x10 {
        print!("───");
    }
    print!("┼─");
    for _ in 0..0x10 {
        print!("─");
    }
    println!();
}
/// Prints `address` in hex, zero-padded to the width of the largest
/// address that will appear in the dump.
fn print_address(address: u64, length: usize) {
    let width = format!("{:X}", address + length as u64).len();
    print!("{:0width$X} ", address, width = width);
}
/// Prints one row of up to 16 hex bytes read directly from memory at
/// `address`; positions past `count` are blank-padded so columns align.
///
/// SAFETY-note (review): dereferences raw pointers — the caller must
/// guarantee `address..address + count` is readable. TODO confirm at call sites.
fn print_raw(address: u64, count: usize) {
    unsafe {
        for by in 0..0x10 {
            if by >= count {
                // Pad past the end of the data so the ASCII column lines up.
                print!("   ");
            }
            else {
                print!("{:02X} ", *((address + (by as u64)) as *mut u8));
            }
            // Column separators matching the header: gap after byte 7,
            // vertical rule after byte 15.
            if by == 0x7 {
                print!(" ")
            }
            else if by == 0xF {
                print!("│ ");
            }
        }
    }
}
// Copied from skyline-rs, thanks jam1garner
/// Maps a byte to the character shown in the ASCII column:
/// control/non-printable ranges become '.', everything else is the
/// byte interpreted as a Unicode scalar.
fn to_ascii_dots(x: u8) -> char {
    if matches!(x, 0..=0x1F | 0x7F..=0xA0 | 0xAD) {
        '.'
    } else {
        x as char
    }
}
/// Prints the ASCII column: `count` bytes starting at `address`, with
/// non-printable bytes rendered as '.', followed by a newline.
///
/// SAFETY-note (review): dereferences raw pointers — the caller must
/// guarantee `address..address + count` is readable. TODO confirm at call sites.
fn print_pretty(address: u64, count: usize) {
    unsafe {
        for ch in 0..count {
            print!("{}", to_ascii_dots(*((address + (ch as u64)) as *mut u8)));
        }
        print!("\n");
    }
}
/// Prints one 16-byte dump row: address, raw hex bytes, then ASCII.
/// The final row may contain fewer than 16 bytes.
fn print_row(ptr: u64, length: usize, row: usize) {
    let row_address: u64 = ptr + ((row * 0x10) as u64);
    // Bytes remaining from this row to the end of the dump, clamped to 16.
    // NOTE(review): assumes row * 0x10 <= length (holds for rows computed
    // by `perform`); otherwise this subtraction would underflow.
    let mut bytes_to_print: usize = length - row * 0x10;
    if bytes_to_print > 0x10 {
        bytes_to_print = 0x10;
    }
    print_address(row_address, length);
    print_raw(row_address, bytes_to_print);
    print_pretty(row_address, bytes_to_print);
}
/// Hex-dumps `length` bytes of memory starting at `ptr`, 16 bytes per row,
/// with a column header and an ASCII sidebar.
pub fn perform(ptr: u64, length: usize) {
    // Number of 16-byte rows, rounding up for a partial final row.
    let rows = (length + 0xF) / 0x10;
    print_dump_header(ptr, length);
    for row in 0..rows {
        print_row(ptr, length, row);
    }
}
| true |
58e0846060a57c41a98b26db636c19f55c7385ae
|
Rust
|
DjaPy/test_dns_rust_server
|
/src/main.rs
|
UTF-8
| 26,179 | 3.4375 | 3 |
[] |
no_license
|
use std::net::{Ipv4Addr, UdpSocket, Ipv6Addr};
type Error = Box<dyn std::error::Error>;
type CommonResult<T> = std::result::Result<T, Error>;

/// A fixed 512-byte buffer holding a raw DNS packet, plus a read/write cursor.
/// (512 bytes matches the UDP receive buffer used by this server.)
pub struct BytePacketBuffer {
    pub buf: [u8; 512],
    pub pos: usize,
}

impl BytePacketBuffer {
    /// This gives us a fresh buffer for holding the packet contents, and a
    /// field for keeping track of where we are.
    pub fn new() -> BytePacketBuffer {
        BytePacketBuffer {
            buf: [0; 512],
            pos: 0,
        }
    }
    /// Current position within buffer
    fn pos(&self) -> usize {
        self.pos
    }
    /// Step the buffer position forward a specific number of steps
    fn step(&mut self, steps: usize) -> CommonResult<()> {
        self.pos += steps;
        Ok(())
    }
    /// Change the buffer position
    fn seek(&mut self, pos: usize) -> CommonResult<()> {
        self.pos = pos;
        Ok(())
    }
    /// Read a single byte and move the position one step forward
    fn read(&mut self) -> CommonResult<u8> {
        if self.pos >= 512 {
            return Err("End of buffer".into());
        }
        let res = self.buf[self.pos];
        self.pos += 1;
        Ok(res)
    }
    /// Get a single byte, without changing the buffer position
    fn get(&mut self, pos: usize) -> CommonResult<u8> {
        if pos >= 512 {
            return Err("End of buffer".into());
        }
        Ok(self.buf[pos])
    }
    /// Get a range of bytes, without changing the buffer position.
    fn get_range(&mut self, start: usize, len: usize) -> CommonResult<&[u8]> {
        // Off-by-one fix: a range ending exactly at byte 512 is still
        // in-bounds; the previous `start + len >= 512` check rejected reads
        // that touch the final byte (e.g. a full 512-byte packet).
        // checked_add also guards against overflow on hostile inputs.
        let end = start.checked_add(len).ok_or("End of buffer")?;
        if end > 512 {
            return Err("End of buffer".into());
        }
        Ok(&self.buf[start..end])
    }
    /// Read two bytes, stepping two steps forward
    fn read_u16(&mut self) -> CommonResult<u16> {
        let res = ((self.read()? as u16) << 8) | (self.read()? as u16);
        Ok(res)
    }
    /// Read four bytes, stepping four steps forward
    fn read_u32(&mut self) -> CommonResult<u32> {
        let res = ((self.read()? as u32) << 24)
            | ((self.read()? as u32) << 16)
            | ((self.read()? as u32) << 8)
            | (self.read()? as u32);
        Ok(res)
    }
    /// Read a qname
    ///
    /// The tricky part: Reading domain names, taking labels into consideration.
    /// Will take something like [3]www[6]google[3]com[0] and append
    /// www.google.com to outstr.
    fn read_qname(&mut self, outstr: &mut String) -> CommonResult<()> {
        // Since we might encounter jumps, we'll keep track of our position
        // locally as opposed to using the position within the struct. This
        // allows us to move the shared position to a point past our current
        // qname, while keeping track of our progress on the current qname
        // using this variable.
        let mut pos = self.pos();
        // track whether or not we've jumped
        let mut jumped = false;
        let max_jumps = 5;
        let mut jumps_performed = 0;
        // Our delimiter which we append for each label. Since we don't want a
        // dot at the beginning of the domain name we'll leave it empty for now
        // and set it to "." at the end of the first iteration.
        let mut delim = "";
        loop {
            // Dns Packets are untrusted data, so we need to be paranoid. Someone
            // can craft a packet with a cycle in the jump instructions. This guards
            // against such packets.
            if jumps_performed > max_jumps {
                return Err(format!("Limit of {} jumps exceeded", max_jumps).into());
            }
            // At this point, we're always at the beginning of a label. Recall
            // that labels start with a length byte.
            let len = self.get(pos)?;
            // If len has the two most significant bits set, it represents a
            // jump to some other offset in the packet:
            if (len & 0xC0) == 0xC0 {
                // Update the buffer position to a point past the current
                // label. We don't need to touch it any further.
                if !jumped {
                    self.seek(pos + 2)?;
                }
                // Read another byte, calculate offset and perform the jump by
                // updating our local position variable
                let b2 = self.get(pos + 1)? as u16;
                let offset = (((len as u16) ^ 0xC0) << 8) | b2;
                pos = offset as usize;
                // Indicate that a jump was performed.
                jumped = true;
                jumps_performed += 1;
                continue;
            }
            // The base scenario, where we're reading a single label and
            // appending it to the output:
            else {
                // Move a single byte forward to move past the length byte.
                pos += 1;
                // Domain names are terminated by an empty label of length 0,
                // so if the length is zero we're done.
                if len == 0 {
                    break;
                }
                // Append the delimiter to our output buffer first.
                outstr.push_str(delim);
                // Extract the actual ASCII bytes for this label and append them
                // to the output buffer.
                let str_buffer = self.get_range(pos, len as usize)?;
                outstr.push_str(&String::from_utf8_lossy(str_buffer).to_lowercase());
                delim = ".";
                // Move forward the full length of the label.
                pos += len as usize;
            }
        }
        if !jumped {
            self.seek(pos)?;
        }
        Ok(())
    }
    /// Write a single byte at the cursor and advance it.
    fn write(&mut self, val: u8) -> CommonResult<()> {
        if self.pos >= 512 {
            return Err("End of buffer".into())
        }
        self.buf[self.pos] = val;
        self.pos += 1;
        Ok(())
    }
    /// Write a single byte (alias of `write`, kept for call-site symmetry).
    fn write_u8(&mut self, val: u8) -> CommonResult<()> {
        self.write(val)?;
        Ok(())
    }
    /// Write two bytes in network (big-endian) order.
    fn write_u16(&mut self, val: u16) -> CommonResult<()> {
        self.write((val >> 8) as u8)?;
        self.write((val & 0xFF) as u8)?;
        Ok(())
    }
    /// Write four bytes in network (big-endian) order.
    fn write_u32(&mut self, val: u32) -> CommonResult<()> {
        self.write(((val >> 24) & 0xFF) as u8)?;
        self.write(((val >> 16) & 0xFF) as u8)?;
        self.write(((val >> 8) & 0xFF) as u8)?;
        self.write((val & 0xFF) as u8)?;
        Ok(())
    }
    /// Write a domain name in DNS label format: [len]label...[0].
    fn write_qname(&mut self, qname: &str) -> CommonResult<()> {
        for label in qname.split('.') {
            let len = label.len();
            // Labels are limited to 63 bytes (the two high bits of the
            // length byte are reserved for compression pointers).
            if len > 0x3f {
                return Err("Single label exceeds 63 characters of length".into())
            }
            self.write_u8(len as u8)?;
            for b in label.as_bytes() {
                self.write_u8(*b)?;
            }
        }
        self.write_u8(0)?;
        Ok(())
    }
    /// Overwrite a single byte at `pos` without moving the cursor.
    /// NOTE(review): unlike `get`, this does not bounds-check; out-of-range
    /// `pos` panics. Callers only patch positions already written.
    fn set(&mut self, pos: usize, val: u8) -> CommonResult<()> {
        self.buf[pos] = val;
        Ok(())
    }
    /// Overwrite two bytes at `pos` in network order (used to back-patch
    /// record data lengths).
    fn set_u16(&mut self, pos: usize, val: u16) -> CommonResult<()> {
        self.set(pos, (val >> 8) as u8)?;
        self.set(pos + 1, (val & 0xFF) as u8)?;
        Ok(())
    }
}
/// DNS response codes (the RCODE field of the packet header).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResultCode {
    NOERROR = 0,
    FORMERR = 1,
    SERVFAIL = 2,
    NXDOMAIN = 3,
    NOTIMP = 4,
    REFUSED = 5,
}

impl ResultCode {
    /// Converts a raw RCODE value to a `ResultCode`.
    /// Zero and any unrecognized value map to `NOERROR`.
    pub fn from_num(num: u8) -> ResultCode {
        match num {
            1 => ResultCode::FORMERR,
            2 => ResultCode::SERVFAIL,
            3 => ResultCode::NXDOMAIN,
            4 => ResultCode::NOTIMP,
            5 => ResultCode::REFUSED,
            // The original `0 | _` arm trips the unreachable-patterns lint;
            // the catch-all alone already covers 0.
            _ => ResultCode::NOERROR,
        }
    }
}
/// The 12-byte DNS message header: id, flag bits, and the four section counts.
#[derive(Clone, Debug)]
pub struct DnsHeader {
    pub id: u16, // 16 bits
    pub recursion_desired: bool, // 1 bit
    pub truncated_message: bool, // 1 bit
    pub authoritative_answer: bool, // 1 bit
    pub opcode: u8, // 4 bits
    pub response: bool, // 1 bit
    pub rescode: ResultCode, // 4 bits
    pub checking_disabled: bool, // 1 bit
    pub authed_data: bool, // 1 bit
    pub z: bool, // 1 bit
    pub recursion_available: bool, // 1 bit
    pub questions: u16, // 16 bits
    pub answers: u16, // 16 bits
    pub authoritative_entries: u16, // 16 bits
    pub resource_entries: u16, // 16 bits
}
impl DnsHeader {
    /// A zeroed header: all flags off, all counts 0, rescode NOERROR.
    pub fn new() -> DnsHeader {
        DnsHeader {
            id: 0,
            recursion_desired: false,
            truncated_message: false,
            authoritative_answer: false,
            opcode: 0,
            response: false,
            rescode: ResultCode::NOERROR,
            checking_disabled: false,
            authed_data: false,
            z: false,
            recursion_available: false,
            questions: 0,
            answers: 0,
            authoritative_entries: 0,
            resource_entries: 0,
        }
    }
    /// Parses the 12-byte header from the buffer's current position.
    pub fn read(&mut self, buffer: &mut BytePacketBuffer) -> CommonResult<()> {
        self.id = buffer.read_u16()?;
        // The 16 flag bits, split into two bytes for bit extraction.
        let flags = buffer.read_u16()?;
        let a = (flags >> 8) as u8;
        let b = (flags & 0xFF) as u8;
        self.recursion_desired = (a & (1 << 0)) > 0;
        self.truncated_message = (a & (1 << 1)) > 0;
        self.authoritative_answer = (a & (1 << 2)) > 0;
        self.opcode = (a >> 3) & 0x0F;
        self.response = (a & (1 << 7)) > 0;
        self.rescode = ResultCode::from_num(b & 0x0F);
        self.checking_disabled = (b & (1 << 4)) > 0;
        self.authed_data = (b & (1 << 5)) > 0;
        self.z = (b & (1 << 6)) > 0;
        self.recursion_available = (b & (1 << 7)) > 0;
        self.questions = buffer.read_u16()?;
        self.answers = buffer.read_u16()?;
        self.authoritative_entries = buffer.read_u16()?;
        self.resource_entries = buffer.read_u16()?;
        // Return the constant header size
        Ok(())
    }
    /// Serializes the header; the exact inverse of `read`.
    pub fn write(&self, buffer: &mut BytePacketBuffer) -> CommonResult<()> {
        buffer.write_u16(self.id)?;
        // First flag byte: RD, TC, AA, opcode, QR.
        buffer.write_u8(
            (self.recursion_desired as u8)
                | ((self.truncated_message as u8) << 1)
                | ((self.authoritative_answer as u8) << 2)
                | (self.opcode << 3)
                // (removed a redundant trailing `as u8` cast — the operand is
                // already u8)
                | ((self.response as u8) << 7),
        )?;
        // Second flag byte: rescode, CD, AD, Z, RA.
        buffer.write_u8(
            (self.rescode as u8)
                | ((self.checking_disabled as u8) << 4)
                | ((self.authed_data as u8) << 5)
                | ((self.z as u8) << 6)
                | ((self.recursion_available as u8) << 7)
        )?;
        buffer.write_u16(self.questions)?;
        buffer.write_u16(self.answers)?;
        buffer.write_u16(self.authoritative_entries)?;
        buffer.write_u16(self.resource_entries)?;
        Ok(())
    }
}
/// DNS record types this server understands; anything else is carried
/// through as `UNKNOWN` with its raw type code.
#[derive(PartialEq, Eq, Debug, Clone, Hash, Copy)]
pub enum QueryType {
    UNKNOWN(u16),
    A,
    NS,
    CNAME,
    MX,
    AAAA,
}

impl QueryType {
    /// The numeric resource-record type code for this query type.
    pub fn to_num(&self) -> u16 {
        match self {
            QueryType::A => 1,
            QueryType::NS => 2,
            QueryType::CNAME => 5,
            QueryType::MX => 15,
            QueryType::AAAA => 28,
            QueryType::UNKNOWN(code) => *code,
        }
    }
    /// Inverse of `to_num`; unrecognized codes become `UNKNOWN(code)`.
    pub fn from_num(code: u16) -> QueryType {
        match code {
            1 => QueryType::A,
            2 => QueryType::NS,
            5 => QueryType::CNAME,
            15 => QueryType::MX,
            28 => QueryType::AAAA,
            other => QueryType::UNKNOWN(other),
        }
    }
}
/// One entry of the question section: a domain name and the queried type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DnsQuestion {
    pub name: String,
    pub qtype: QueryType,
}
impl DnsQuestion {
    /// Creates a question for `name` with the given query type.
    pub fn new(name: String, qtype: QueryType) -> DnsQuestion {
        DnsQuestion { name, qtype }
    }
    /// Parses a question (name, type, class) at the buffer's cursor;
    /// the class field is read but discarded.
    pub fn read(&mut self, buffer: &mut BytePacketBuffer) -> CommonResult<()> {
        buffer.read_qname(&mut self.name)?;
        self.qtype = QueryType::from_num(buffer.read_u16()?); // qtype
        let _ = buffer.read_u16()?; // class
        Ok(())
    }
    /// Serializes the question; the class is always written as 1 (IN).
    pub fn write(&self, buffer: &mut BytePacketBuffer) -> CommonResult<()> {
        buffer.write_qname(&self.name)?;
        let typenum = self.qtype.to_num();
        buffer.write_u16(typenum)?;
        buffer.write_u16(1)?;
        Ok(())
    }
}
/// A parsed DNS resource record; each variant's trailing comment is its
/// numeric type code.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[allow(dead_code)]
pub enum DnsRecord {
    /// Any type this server doesn't model; payload is skipped on read.
    UNKNOWN {
        domain: String,
        qtype: u16,
        data_len: u16,
        ttl: u32,
    }, // 0
    /// IPv4 host address.
    A {
        domain: String,
        addr: Ipv4Addr,
        ttl: u32,
    }, // 1
    /// Authoritative name server.
    NS {
        domain: String,
        host: String,
        ttl: u32,
    }, // 2
    /// Canonical name (alias).
    CNAME {
        domain: String,
        host: String,
        ttl: u32,
    }, // 5
    /// Mail exchange, with its preference value.
    MX {
        domain: String,
        priority: u16,
        host: String,
        ttl: u32,
    }, // 15
    /// IPv6 host address.
    AAAA {
        domain: String,
        addr: Ipv6Addr,
        ttl: u32,
    }, // 28
}
impl DnsRecord {
    /// Parses one resource record from the buffer's current position.
    /// Unknown record types have their payload skipped (`data_len` bytes).
    pub fn read(buffer: &mut BytePacketBuffer) -> CommonResult<DnsRecord> {
        let mut domain = String::new();
        buffer.read_qname(&mut domain)?;
        let qtype_num = buffer.read_u16()?;
        let qtype = QueryType::from_num(qtype_num);
        let _ = buffer.read_u16()?; // class, ignored
        let ttl = buffer.read_u32()?;
        let data_len = buffer.read_u16()?;
        match qtype {
            QueryType::A => {
                // Four octets packed big-endian into one u32.
                let raw_addr = buffer.read_u32()?;
                let addr = Ipv4Addr::new(
                    ((raw_addr >> 24) & 0xFF) as u8,
                    ((raw_addr >> 16) & 0xFF) as u8,
                    ((raw_addr >> 8) & 0xFF) as u8,
                    (raw_addr & 0xFF) as u8,
                );
                // (field-shorthand replaces the redundant `domain: domain` etc.)
                Ok(DnsRecord::A { domain, addr, ttl })
            }
            QueryType::AAAA => {
                // Eight 16-bit segments packed big-endian into four u32s.
                let raw_addr1 = buffer.read_u32()?;
                let raw_addr2 = buffer.read_u32()?;
                let raw_addr3 = buffer.read_u32()?;
                let raw_addr4 = buffer.read_u32()?;
                let addr = Ipv6Addr::new(
                    ((raw_addr1 >> 16) & 0xFFFF) as u16,
                    (raw_addr1 & 0xFFFF) as u16,
                    ((raw_addr2 >> 16) & 0xFFFF) as u16,
                    (raw_addr2 & 0xFFFF) as u16,
                    ((raw_addr3 >> 16) & 0xFFFF) as u16,
                    (raw_addr3 & 0xFFFF) as u16,
                    ((raw_addr4 >> 16) & 0xFFFF) as u16,
                    (raw_addr4 & 0xFFFF) as u16,
                );
                Ok(DnsRecord::AAAA { domain, addr, ttl })
            }
            QueryType::NS => {
                let mut ns = String::new();
                buffer.read_qname(&mut ns)?;
                Ok(DnsRecord::NS {
                    domain,
                    host: ns,
                    ttl,
                })
            }
            QueryType::CNAME => {
                let mut cname = String::new();
                buffer.read_qname(&mut cname)?;
                Ok(DnsRecord::CNAME {
                    domain,
                    host: cname,
                    ttl,
                })
            }
            QueryType::MX => {
                let priority = buffer.read_u16()?;
                let mut mx = String::new();
                buffer.read_qname(&mut mx)?;
                Ok(DnsRecord::MX {
                    domain,
                    priority,
                    host: mx,
                    ttl,
                })
            }
            QueryType::UNKNOWN(_) => {
                // Skip the payload of types we don't model.
                buffer.step(data_len as usize)?;
                Ok(DnsRecord::UNKNOWN {
                    domain,
                    qtype: qtype_num,
                    data_len,
                    ttl,
                })
            }
        }
    }
    /// Serializes the record, returning the number of bytes written.
    /// UNKNOWN records are skipped (0 bytes written).
    pub fn write(&self, buffer: &mut BytePacketBuffer) -> CommonResult<usize> {
        let start_pos = buffer.pos();
        match *self {
            DnsRecord::A {
                ref domain,
                ref addr,
                ttl,
            } => {
                buffer.write_qname(domain)?;
                buffer.write_u16(QueryType::A.to_num())?;
                buffer.write_u16(1)?; // class IN
                buffer.write_u32(ttl)?;
                buffer.write_u16(4)?; // RDLENGTH: a fixed 4-byte address
                let octets = addr.octets();
                buffer.write_u8(octets[0])?;
                buffer.write_u8(octets[1])?;
                buffer.write_u8(octets[2])?;
                buffer.write_u8(octets[3])?;
            }
            DnsRecord::NS {
                ref domain,
                ref host,
                ttl,
            } => {
                buffer.write_qname(domain)?;
                buffer.write_u16(QueryType::NS.to_num())?;
                buffer.write_u16(1)?;
                buffer.write_u32(ttl)?;
                // RDLENGTH is unknown until the name is written: write a
                // placeholder, then back-patch it.
                let pos = buffer.pos();
                buffer.write_u16(0)?;
                buffer.write_qname(host)?;
                let size = buffer.pos() - (pos + 2);
                buffer.set_u16(pos, size as u16)?;
            }
            DnsRecord::CNAME {
                ref domain,
                ref host,
                ttl,
            } => {
                buffer.write_qname(domain)?;
                buffer.write_u16(QueryType::CNAME.to_num())?;
                buffer.write_u16(1)?;
                buffer.write_u32(ttl)?;
                // Same placeholder/back-patch dance as NS.
                let pos = buffer.pos();
                buffer.write_u16(0)?;
                buffer.write_qname(host)?;
                let size = buffer.pos() - (pos + 2);
                buffer.set_u16(pos, size as u16)?;
            }
            DnsRecord::MX {
                ref domain,
                priority,
                ref host,
                ttl,
            } => {
                buffer.write_qname(domain)?;
                buffer.write_u16(QueryType::MX.to_num())?;
                buffer.write_u16(1)?;
                buffer.write_u32(ttl)?;
                // NOTE(review): here the placeholder slot is overwritten with
                // `priority`, and the back-patched length at `pos` covers the
                // priority+host payload — matches the original behavior.
                let pos = buffer.pos();
                buffer.write_u16(priority)?;
                buffer.write_qname(host)?;
                let size = buffer.pos() - (pos + 2);
                buffer.set_u16(pos, size as u16)?;
            }
            DnsRecord::AAAA {
                ref domain,
                ref addr,
                ttl,
            } => {
                buffer.write_qname(domain)?;
                buffer.write_u16(QueryType::AAAA.to_num())?;
                buffer.write_u16(1)?;
                buffer.write_u32(ttl)?;
                buffer.write_u16(16)?; // RDLENGTH: a fixed 16-byte address
                for segment in &addr.segments() {
                    buffer.write_u16(*segment)?;
                }
            }
            DnsRecord::UNKNOWN { .. } => {
                println!("Skipping record: {:?}", self);
            }
        }
        Ok(buffer.pos() - start_pos)
    }
}
/// A complete DNS message: header plus the four record sections.
#[derive(Clone, Debug)]
pub struct DnsPacket {
    pub header: DnsHeader,
    pub questions: Vec<DnsQuestion>,
    pub answers: Vec<DnsRecord>,
    pub authorities: Vec<DnsRecord>,
    pub resources: Vec<DnsRecord>,
}
impl DnsPacket {
    /// An empty packet with a zeroed header.
    pub fn new() -> DnsPacket {
        DnsPacket {
            header: DnsHeader::new(),
            questions: Vec::new(),
            answers: Vec::new(),
            authorities: Vec::new(),
            resources: Vec::new(),
        }
    }
    /// Parses a full packet: header first, then as many questions and
    /// records as the header's counts declare.
    pub fn from_buffer(buffer: &mut BytePacketBuffer) -> CommonResult<DnsPacket> {
        let mut result = DnsPacket::new();
        result.header.read(buffer)?;
        for _ in 0..result.header.questions {
            let mut question = DnsQuestion::new("".to_string(), QueryType::UNKNOWN(0));
            question.read(buffer)?;
            result.questions.push(question);
        }
        for _ in 0..result.header.answers {
            let rec = DnsRecord::read(buffer)?;
            result.answers.push(rec);
        }
        for _ in 0..result.header.authoritative_entries {
            let rec = DnsRecord::read(buffer)?;
            result.authorities.push(rec);
        }
        for _ in 0..result.header.resource_entries {
            let rec = DnsRecord::read(buffer)?;
            result.resources.push(rec);
        }
        Ok(result)
    }
    /// Serializes the packet, refreshing the header's section counts from
    /// the actual vector lengths first.
    pub fn write(&mut self, buffer: &mut BytePacketBuffer) -> CommonResult<()> {
        self.header.questions = self.questions.len() as u16;
        self.header.answers = self.answers.len() as u16;
        // BUG FIX: the authority count was previously taken from
        // `self.resources`, producing a wrong NSCOUNT whenever the two
        // sections differed in length.
        self.header.authoritative_entries = self.authorities.len() as u16;
        self.header.resource_entries = self.resources.len() as u16;
        self.header.write(buffer)?;
        for question in &self.questions {
            question.write(buffer)?;
        }
        for rec in &self.answers {
            rec.write(buffer)?;
        }
        for rec in &self.authorities {
            rec.write(buffer)?;
        }
        for rec in &self.resources {
            rec.write(buffer)?;
        }
        Ok(())
    }
    /// First A-record address in the answer section, if any.
    pub fn get_random_a(&self) -> Option<Ipv4Addr> {
        self.answers
            .iter()
            .filter_map(|record| match record {
                DnsRecord::A { addr, .. } => Some(*addr),
                _ => None,
            })
            .next()
    }
    /// Iterates (ns-domain, ns-host) pairs from the authority section whose
    /// domain is a suffix of `qname`.
    fn get_ns<'a>(&'a self, qname: &'a str) -> impl Iterator<Item = (&'a str, &'a str)>{
        self.authorities
            .iter()
            .filter_map(|record| match record {
                DnsRecord::NS { domain, host, ..} => Some((domain.as_str(), host.as_str())),
                _ => None,
            })
            .filter(move |(domain, _)| qname.ends_with(*domain))
    }
    /// A nameserver for `qname` whose address appears in the additional
    /// section (a "glue" record), if one exists.
    pub fn get_resolved_ns(&self, qname: &str) -> Option<Ipv4Addr> {
        self.get_ns(qname)
            .flat_map(|(_, host)| {
                self.resources
                    .iter()
                    .filter_map(move |record| match record {
                        DnsRecord::A { domain, addr, .. } if domain == host => Some(addr),
                        _ => None,
                    })
            })
            .map(|addr| *addr)
            .next()
    }
    /// A nameserver host for `qname` that has no glue record, if any;
    /// the caller must resolve it separately.
    pub fn get_unresolved_ns<'a>(&'a self, qname: &'a str) -> Option<&'a str> {
        self.get_ns(qname)
            .map(|(_, host)| host)
            .next()
    }
}
/// Sends a single (non-recursive-from-our-side) query for `qname`/`qtype`
/// to `server` over UDP and parses the response.
fn lookup(qname: &str, qtype: QueryType, server: (Ipv4Addr, u16)) -> CommonResult<DnsPacket> {
    // NOTE(review): fixed source port 43210 — fails if already in use or if
    // two lookups run concurrently; an ephemeral port (0) would be safer.
    let socket = UdpSocket::bind(("0.0.0.0", 43210))?;
    let mut packet = DnsPacket::new();
    // Fixed transaction id; the answer is not matched against it.
    packet.header.id = 6666;
    packet.header.questions = 1;
    packet.header.recursion_desired = true;
    packet
        .questions
        .push(DnsQuestion::new(qname.to_string(), qtype));
    let mut req_buffer = BytePacketBuffer::new();
    packet.write(&mut req_buffer)?;
    socket.send_to(&req_buffer.buf[0..req_buffer.pos], server)?;
    let mut res_buffer = BytePacketBuffer::new();
    socket.recv_from(&mut res_buffer.buf)?;
    DnsPacket::from_buffer(&mut res_buffer)
}
fn recursive_lookup(qname: &str, qtype: QueryType) -> CommonResult<DnsPacket> {
let mut ns = "198.41.0.4".parse::<Ipv4Addr>().unwrap();
loop {
println!("attemptin lookup of {:?} {} with ns {}", qtype, qname, ns);
let ns_copy = ns;
let server = (ns_copy, 53);
let response = lookup(qname, qtype, server)?;
if !response.answers.is_empty() && response.header.rescode == ResultCode::NOERROR {
return Ok(response);
}
if response.header.rescode == ResultCode::NXDOMAIN {
return Ok(response);
}
if let Some(new_ns) = response.get_resolved_ns(qname) {
ns = new_ns;
continue;
}
let new_ns_name = match response.get_unresolved_ns(qname) {
Some(x) => x,
None => return Ok(response),
};
let recursive_response = recursive_lookup(&new_ns_name, QueryType::A)?;
if let Some(new_ns) = recursive_response.get_random_a() {
ns = new_ns;
} else {
return Ok(response);
}
}
}
/// Receives one query on `socket`, resolves its first question via
/// `recursive_lookup`, and sends back a response packet.
fn handle_query(socket: &UdpSocket) -> CommonResult<()> {
    let mut req_buffer = BytePacketBuffer::new();
    let (_, src) = socket.recv_from(&mut req_buffer.buf)?;
    let mut request = DnsPacket::from_buffer(&mut req_buffer)?;
    // Start building the response with the request's id echoed back.
    let mut packet = DnsPacket::new();
    packet.header.id = request.header.id;
    packet.header.recursion_desired = true;
    packet.header.recursion_available = true;
    packet.header.response = true;
    if let Some(question) = request.questions.pop() {
        println!("Received query: {:?}", question);
        if let Ok(result) = recursive_lookup(&question.name, question.qtype) {
            packet.header.rescode = result.header.rescode;
            // Move the question into the response instead of cloning it;
            // it is not used again after this point.
            packet.questions.push(question);
            for rec in result.answers {
                println!("Answer: {:?}", rec);
                packet.answers.push(rec);
            }
            for rec in result.authorities {
                println!("Authority: {:?}", rec);
                packet.authorities.push(rec);
            }
            for rec in result.resources {
                println!("Resource: {:?}", rec);
                packet.resources.push(rec);
            }
        } else {
            packet.header.rescode = ResultCode::SERVFAIL;
        }
    } else {
        // No question at all is a malformed request.
        packet.header.rescode = ResultCode::FORMERR;
    }
    let mut res_buffer = BytePacketBuffer::new();
    packet.write(&mut res_buffer)?;
    let len = res_buffer.pos();
    let data = res_buffer.get_range(0, len)?;
    socket.send_to(data, src)?;
    Ok(())
}
/// Binds a UDP socket on port 2053 and serves queries forever;
/// per-query failures are logged, not fatal.
fn main() -> CommonResult<()> {
    let socket = UdpSocket::bind(("0.0.0.0", 2053))?;
    loop {
        match handle_query(&socket) {
            Ok(_) => {},
            // (typo fix: was "occured")
            Err(e) => eprintln!("An error occurred: {}", e),
        }
    }
}
| true |
c0edea91e796c666af8adb84419df9c6cb4d6dfd
|
Rust
|
winksaville/fuchsia
|
/third_party/rust_crates/vendor/tokio-sync/tests/atomic_task.rs
|
UTF-8
| 1,532 | 2.78125 | 3 |
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
#![deny(warnings)]
extern crate futures;
extern crate tokio_mock_task;
extern crate tokio_sync;
use futures::task::{self, Task};
use tokio_mock_task::*;
use tokio_sync::task::AtomicTask;
// Compile-time assertions that these types are Send and Sync.
// BUG FIX: AssertSync was declared with a `Send` supertrait, so the "Sync"
// assertion only re-checked Send and never verified Sync at all.
trait AssertSend: Send {}
trait AssertSync: Sync {}
impl AssertSend for AtomicTask {}
impl AssertSync for AtomicTask {}
impl AssertSend for Task {}
impl AssertSync for Task {}
#[test]
fn register_task() {
    // AtomicTask::register_task should *always* register the
    // arbitrary task — even when a different task is "current",
    // register_task must override it with the one passed in.
    let atomic = AtomicTask::new();
    let mut mock1 = MockTask::new();
    let mut mock2 = MockTask::new();
    // Register once...
    mock1.enter(|| atomic.register());
    // Grab the actual 2nd task from the mock...
    let task2 = mock2.enter(task::current);
    // Now register the 2nd task, even though in the context where
    // the first task would be considered 'current'...
    {
        // Need a block to grab a reference, so that we only move
        // task2 into the closure, not the AtomicTask...
        let atomic = &atomic;
        mock1.enter(move || {
            atomic.register_task(task2);
        });
    }
    // Just proving that they haven't been notified yet...
    assert!(!mock1.is_notified(), "mock1 shouldn't be notified yet");
    assert!(!mock2.is_notified(), "mock2 shouldn't be notified yet");
    // Now trigger the notify, and ensure it was task2
    atomic.notify();
    assert!(!mock1.is_notified(), "mock1 shouldn't be notified");
    assert!(mock2.is_notified(), "mock2 should be notified");
}
| true |
fb02ba2adc49f942876d1c259d7e297cb53b1f13
|
Rust
|
jqnatividad/qsv
|
/tests/test_pseudo.rs
|
UTF-8
| 1,581 | 2.703125 | 3 |
[
"MIT",
"Unlicense"
] |
permissive
|
use crate::workdir::Workdir;
#[test]
fn pseudo() {
    // `pseudo name` replaces each distinct value in the `name` column
    // with a stable integer id assigned in order of first appearance:
    // Mary -> 0, John -> 1, Sue -> 2. Other columns pass through.
    let wrk = Workdir::new("pseudo");
    wrk.create(
        "data.csv",
        vec![
            svec!["name", "colors"],
            svec!["Mary", "yellow"],
            svec!["John", "blue"],
            svec!["Mary", "purple"],
            svec!["Sue", "orange"],
            svec!["John", "magenta"],
            svec!["Mary", "cyan"],
        ],
    );
    let mut cmd = wrk.command("pseudo");
    cmd.arg("name").arg("data.csv");
    let got: Vec<Vec<String>> = wrk.read_stdout(&mut cmd);
    let expected = vec![
        svec!["name", "colors"],
        svec!["0", "yellow"],
        svec!["1", "blue"],
        svec!["0", "purple"],
        svec!["2", "orange"],
        svec!["1", "magenta"],
        svec!["0", "cyan"],
    ];
    assert_eq!(got, expected);
}
#[test]
fn pseudo_no_headers() {
    // Same as `pseudo`, but with --no-headers the column is addressed
    // by 1-based index ("1") and no header row appears in the output.
    let wrk = Workdir::new("pseudo");
    wrk.create(
        "data.csv",
        vec![
            svec!["Mary", "yellow"],
            svec!["John", "blue"],
            svec!["Mary", "purple"],
            svec!["Sue", "orange"],
            svec!["John", "magenta"],
            svec!["Mary", "cyan"],
        ],
    );
    let mut cmd = wrk.command("pseudo");
    cmd.arg("1").arg("--no-headers").arg("data.csv");
    let got: Vec<Vec<String>> = wrk.read_stdout(&mut cmd);
    let expected = vec![
        svec!["0", "yellow"],
        svec!["1", "blue"],
        svec!["0", "purple"],
        svec!["2", "orange"],
        svec!["1", "magenta"],
        svec!["0", "cyan"],
    ];
    assert_eq!(got, expected);
}
| true |
bf62f0208fe877b76d661ff7ae20bb335c81cf56
|
Rust
|
y-usuzumi/survive-the-course
|
/survive-the-course-rs/src/problems/leetcode_cn/_53_maximum_subarray.rs
|
UTF-8
| 746 | 3.546875 | 4 |
[
"BSD-3-Clause"
] |
permissive
|
// https://leetcode-cn.com/problems/maximum-subarray/
pub struct Solution;

impl Solution {
    /// Kadane's algorithm: track the best sum of a subarray ending at
    /// the current element (`running`) and the best seen overall.
    /// Returns `i32::MIN` for an empty input.
    pub fn max_sub_array(nums: Vec<i32>) -> i32 {
        let mut best = i32::MIN;
        let mut running = 0;
        for n in nums {
            // Either extend the current subarray or restart at `n`.
            running = if running + n > n { running + n } else { n };
            if running > best {
                best = running;
            }
        }
        best
    }
}
// The three official LeetCode examples for problem 53.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_1() {
        // Mixed signs: best subarray is [4,-1,2,1].
        assert_eq!(Solution::max_sub_array(vec![-2,1,-3,4,-1,2,1,-5,4]), 6);
    }
    #[test]
    fn test_2() {
        // Single element.
        assert_eq!(Solution::max_sub_array(vec![1]), 1);
    }
    #[test]
    fn test_3() {
        // Whole array is the best subarray.
        assert_eq!(Solution::max_sub_array(vec![5,4,-1,7,8]), 23);
    }
}
| true |
ee20dbab44e247077a9228724916947cc34914fb
|
Rust
|
jvff/ioc-test
|
/src/line/line_codec.rs
|
UTF-8
| 1,227 | 3 | 3 |
[] |
no_license
|
use std::io;
use std::str;
use bytes::BytesMut;
use tokio_io::codec::{Decoder, Encoder};
/// Builds the `io::Error` returned when a received line is not valid UTF-8.
fn invalid_utf8_error() -> io::Error {
    let kind = io::ErrorKind::Other;
    io::Error::new(kind, "invalid UTF-8 message")
}
/// Frames a byte stream into lines delimited by a configurable separator byte.
pub struct LineCodec {
    separator: u8,
}

impl LineCodec {
    /// Creates a codec that treats `separator` as the line delimiter.
    pub fn with_separator(separator: u8) -> Self {
        LineCodec { separator: separator }
    }
}
impl Decoder for LineCodec {
    type Item = String;
    type Error = io::Error;
    /// Extracts one complete line from `buffer`, if present.
    ///
    /// Returns `Ok(Some(line))` when a separator byte is found (the
    /// separator is consumed but not included in the returned line),
    /// `Ok(None)` when more data is needed, and an error when the line
    /// bytes are not valid UTF-8.
    fn decode(&mut self, buffer: &mut BytesMut) -> io::Result<Option<String>> {
        let separator = self.separator;
        let separator_pos = buffer.iter().position(|&byte| byte == separator);
        if let Some(separator_pos) = separator_pos {
            // Take the line bytes, then drop the separator itself.
            let line = buffer.split_to(separator_pos);
            buffer.split_to(1);
            str::from_utf8(&line)
                .map(|chars| Some(chars.to_string()))
                .map_err(|_| invalid_utf8_error())
        } else {
            // No separator seen yet; wait for more input.
            Ok(None)
        }
    }
}
impl Encoder for LineCodec {
    type Item = String;
    type Error = io::Error;
    /// Appends `msg` to `buf` followed by the configured separator byte.
    /// Note: `msg` itself is not checked for embedded separators.
    fn encode(&mut self, msg: String, buf: &mut BytesMut) -> io::Result<()> {
        buf.extend(msg.as_bytes());
        buf.extend(&[self.separator]);
        Ok(())
    }
}
| true |
29fac121d530e1fde648b4060b063f4121bc8a42
|
Rust
|
CarlEkerot/gameboy
|
/src/operations/inc.rs
|
UTF-8
| 3,945 | 3.359375 | 3 |
[] |
no_license
|
use cpu::CPU;
use instructions::Instruction;
use definition::Operand;
use errors::*;
use constants::*;
use operations::Execute;
/// Implements the INC instruction family: increment an 8-bit register,
/// a 16-bit register pair, SP, or the byte addressed by (HL).
pub struct Increase;

impl Execute for Increase {
    /// Executes INC for the destination encoded in the instruction's
    /// first operand, wrapping on overflow. N is always cleared at the
    /// end; Z and H are updated per-destination below.
    fn execute(instruction: &Instruction, cpu: &mut CPU) -> Result<()> {
        let dst = instruction.get_operand(0)?;
        match *dst {
            // INC r: 8-bit register increment; H from the low nibble, Z on zero.
            Operand::Register(r) => {
                let val = cpu.reg[r];
                let res = val.wrapping_add(1);
                cpu.reg[r] = res;
                cpu.set_half_carry(val as usize, 1);
                cpu.flag_cond(FLAG_Z, res == 0);
            },
            // INC rr: 16-bit register pair increment.
            // NOTE(review): on DMG hardware 16-bit INC affects no flags,
            // but here H (from the high byte) and Z are updated — confirm
            // this is intentional for this emulator.
            Operand::RegisterPair(h, l) => {
                let val = cpu.read_reg_short(h, l);
                let res = val.wrapping_add(1);
                cpu.store_reg_short(h, l, res);
                // Make sure we calculate carry on high byte
                cpu.set_half_carry((val >> 8) as usize, 1);
                cpu.flag_cond(FLAG_Z, res == 0);
            },
            // INC SP: same flag treatment as register pairs (see note above).
            Operand::SP => {
                let val = cpu.sp;
                let res = val.wrapping_add(1);
                cpu.sp = res;
                cpu.set_half_carry((val >> 8) as usize, 1);
                cpu.flag_cond(FLAG_Z, res == 0);
            },
            // INC (HL): increment the byte in memory addressed by HL.
            Operand::RegisterPairAddr(h, l) => {
                let addr = cpu.read_reg_addr(h, l);
                let val = cpu.load_mem(addr);
                let res = val.wrapping_add(1);
                cpu.store_mem(addr, res);
                cpu.set_half_carry(val as usize, 1);
                cpu.flag_cond(FLAG_Z, res == 0);
            },
            _ => {
                println!("UNEXPECTED OPERANDS IN INC");
            }
        };
        cpu.clear_flag(FLAG_N);
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use test_helpers::{execute_all, execute_instruction, test_cpu};
    use definition::Mnemonic;
    use constants::*;
    // Smoke-test: run every INC opcode once.
    #[test]
    fn execute_incs() {
        execute_all(Mnemonic::INC);
    }
    // Each 8-bit INC opcode increments its target register.
    #[test]
    fn test_inc_reg() {
        let reg_codes: [(u16, usize); 7] = [
            (0x3c, REG_A),
            (0x04, REG_B),
            (0x0c, REG_C),
            (0x14, REG_D),
            (0x1c, REG_E),
            (0x24, REG_H),
            (0x2c, REG_L),
        ];
        for &(c, r) in reg_codes.iter() {
            let mut cpu = test_cpu();
            cpu.reg[r] = 0x11;
            execute_instruction(&mut cpu, c, None);
            assert_eq!(cpu.reg[r], 0x12);
        }
    }
    // 0xff + 1 wraps to 0x00: Z and H set (flags = ZH00_0000).
    #[test]
    fn test_inc_overflow() {
        let mut cpu = test_cpu();
        cpu.reg[REG_A] = 0xff;
        execute_instruction(&mut cpu, 0x3c, None);
        assert_eq!(cpu.reg[REG_A], 0x00);
        assert_eq!(cpu.flag, 0b1010_0000);
    }
    // 0x0f + 1 carries out of the low nibble: H set, Z clear.
    #[test]
    fn test_inc_half_carry() {
        let mut cpu = test_cpu();
        cpu.reg[REG_A] = 0x0f;
        execute_instruction(&mut cpu, 0x3c, None);
        assert_eq!(cpu.reg[REG_A], 0x10);
        assert_eq!(cpu.flag, 0b0010_0000);
    }
    // INC (HL) increments the byte at the address held in HL.
    #[test]
    fn test_inc_regpair_addr() {
        let mut cpu = test_cpu();
        cpu.store_mem(0xff22, 0x11);
        cpu.reg[REG_H] = 0xff;
        cpu.reg[REG_L] = 0x22;
        execute_instruction(&mut cpu, 0x34, None);
        assert_eq!(cpu.load_mem(0xff22), 0x12);
    }
    // 16-bit INC treats the pair as one value (0xaabb -> 0xaabc).
    #[test]
    fn test_inc_regpair() {
        let pairs: [(u16, usize, usize); 3] = [
            (0x03, REG_B, REG_C),
            (0x13, REG_D, REG_E),
            (0x23, REG_H, REG_L),
        ];
        for &(c, h, l) in pairs.iter() {
            let mut cpu = test_cpu();
            cpu.reg[h] = 0xaa;
            cpu.reg[l] = 0xbb;
            execute_instruction(&mut cpu, c, None);
            assert_eq!(cpu.reg[h], 0xaa);
            assert_eq!(cpu.reg[l], 0xbc);
        }
    }
    // INC SP increments the 16-bit stack pointer.
    #[test]
    fn test_inc_sp() {
        let mut cpu = test_cpu();
        cpu.sp = 0xaabb;
        execute_instruction(&mut cpu, 0x33, None);
        assert_eq!(cpu.sp, 0xaabc);
    }
}
| true |
91969313887d7f3ecdf0d6d57c2e7263dda6637e
|
Rust
|
mesalock-linux/crates-io
|
/vendor/hyper-0.10.16/src/uri.rs
|
UTF-8
| 3,816 | 3.796875 | 4 |
[
"MIT",
"Apache-2.0",
"Unlicense",
"BSD-3-Clause",
"0BSD"
] |
permissive
|
//! HTTP RequestUris
use std::fmt::{Display, self};
use std::str::FromStr;
use url::Url;
use url::ParseError as UrlError;
use Error;
/// The Request-URI of a Request's StartLine.
///
/// From Section 5.3, Request Target:
/// > Once an inbound connection is obtained, the client sends an HTTP
/// > request message (Section 3) with a request-target derived from the
/// > target URI. There are four distinct formats for the request-target,
/// > depending on both the method being requested and whether the request
/// > is to a proxy.
/// >
/// > ```notrust
/// > request-target = origin-form
/// > / absolute-form
/// > / authority-form
/// > / asterisk-form
/// > ```
// PartialEq enables the FromStr round-trip tests below; Clone lets
// callers retain the parsed target.
#[derive(Debug, PartialEq, Clone)]
pub enum RequestUri {
    /// The most common request target, an absolute path and optional query.
    ///
    /// For example, the line `GET /where?q=now HTTP/1.1` would parse the URI
    /// as `AbsolutePath("/where?q=now".to_string())`.
    AbsolutePath(String),
    /// An absolute URI. Used in conjunction with proxies.
    ///
    /// > When making a request to a proxy, other than a CONNECT or server-wide
    /// > OPTIONS request (as detailed below), a client MUST send the target
    /// > URI in absolute-form as the request-target.
    ///
    /// An example StartLine with an `AbsoluteUri` would be
    /// `GET http://www.example.org/pub/WWW/TheProject.html HTTP/1.1`.
    AbsoluteUri(Url),
    /// The authority form is only for use with `CONNECT` requests.
    ///
    /// An example StartLine: `CONNECT www.example.com:80 HTTP/1.1`.
    Authority(String),
    /// The star is used to target the entire server, instead of a specific resource.
    ///
    /// This is only used for a server-wide `OPTIONS` request.
    Star,
}
impl FromStr for RequestUri {
    type Err = Error;

    /// Parses a request-target into one of the four RFC 7230 forms:
    /// `*` (asterisk), leading `/` (origin), contains `/` (absolute),
    /// otherwise authority form. Empty input is an error.
    fn from_str(s: &str) -> Result<RequestUri, Error> {
        let bytes = s.as_bytes();
        if bytes.is_empty() {
            Err(Error::Uri(UrlError::RelativeUrlWithoutBase))
        } else if bytes == b"*" {
            Ok(RequestUri::Star)
        } else if bytes.starts_with(b"/") {
            Ok(RequestUri::AbsolutePath(s.to_owned()))
        } else if bytes.contains(&b'/') {
            Ok(RequestUri::AbsoluteUri(try!(Url::parse(s))))
        } else {
            // Authority form ("host:port"): validate by parsing with a
            // dummy scheme, then keep the raw input. Fixed: the previous
            // `todo!(...)` here panicked at runtime for every
            // authority-form target (e.g. "hyper.rs").
            let mut temp = "http://".to_owned();
            temp.push_str(s);
            try!(Url::parse(&temp[..]));
            // TODO: compare `s` against the parsed URL's authority()
            // instead of trusting the raw input verbatim.
            Ok(RequestUri::Authority(s.to_owned()))
        }
    }
}
impl Display for RequestUri {
    /// Writes the URI exactly as it would appear on a request line.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            RequestUri::Star => f.write_str("*"),
            RequestUri::AbsolutePath(ref p) => f.write_str(p),
            RequestUri::Authority(ref a) => f.write_str(a),
            RequestUri::AbsoluteUri(ref url) => write!(f, "{}", url),
        }
    }
}
#[test]
fn test_uri_fromstr() {
    // Parse each of the four request-target forms and compare with the
    // expected variant.
    fn read(s: &str, result: RequestUri) {
        assert_eq!(s.parse::<RequestUri>().unwrap(), result);
    }
    read("*", RequestUri::Star);
    read("http://hyper.rs/", RequestUri::AbsoluteUri(Url::parse("http://hyper.rs/").unwrap()));
    read("hyper.rs", RequestUri::Authority("hyper.rs".to_owned()));
    read("/", RequestUri::AbsolutePath("/".to_owned()));
}
#[test]
fn test_uri_display() {
    // Display must reproduce the original request-target text exactly.
    fn assert_display(expected_string: &str, request_uri: RequestUri) {
        assert_eq!(expected_string, format!("{}", request_uri));
    }
    assert_display("*", RequestUri::Star);
    assert_display("http://hyper.rs/", RequestUri::AbsoluteUri(Url::parse("http://hyper.rs/").unwrap()));
    assert_display("hyper.rs", RequestUri::Authority("hyper.rs".to_owned()));
    assert_display("/", RequestUri::AbsolutePath("/".to_owned()));
}
| true |
caef03fd362018f0186a4afdbc949bb92b6c200b
|
Rust
|
sorpaas/blockchain-rs
|
/src/chain.rs
|
UTF-8
| 2,345 | 3.25 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::marker::PhantomData;
use std::collections::HashMap;
use std::hash::Hash;
/// A block header that can report its own hash and (except for the
/// genesis header) its parent's hash.
pub trait HeaderHash<H: Copy> {
    fn parent_hash(&self) -> Option<H>;
    fn header_hash(&self) -> H;
}
/// Storage abstraction for headers, keyed by their hash.
pub trait HeaderStore {
    type Hash: Copy;
    type Header: HeaderHash<Self::Hash> + Ord;
    fn fetch(&self, hash: Self::Hash) -> Option<&Self::Header>;
    fn put(&mut self, block: Self::Header);
}
/// In-memory store: a plain HashMap from header hash to header.
impl<Ha: Copy + Eq + Hash, He: HeaderHash<Ha> + Ord> HeaderStore for HashMap<Ha, He> {
    type Hash = Ha;
    type Header = He;
    fn fetch(&self, hash: Ha) -> Option<&He> {
        self.get(&hash)
    }
    fn put(&mut self, header: He) {
        // Keyed by the header's own hash; re-inserting replaces.
        self.insert(header.header_hash(), header);
    }
}
/// A chain of headers with a tracked "best" tip (highest-ranked by `Ord`).
pub struct Chain<H, B, S> {
    best_hash: H,
    store: S,
    // Ties the unused `B` type parameter to the struct.
    _block_marker: PhantomData<B>,
}
impl<H: Copy, B: HeaderHash<H> + Ord, S: HeaderStore<Hash=H, Header=B> + Default> Chain<H, B, S> {
    /// Creates a chain seeded with `genesis`, which becomes the best
    /// block. Panics if `genesis` has a parent hash.
    pub fn new(genesis: B) -> Self {
        assert!(genesis.parent_hash().is_none());
        let best_hash = genesis.header_hash();
        let mut store = S::default();
        store.put(genesis);
        Self {
            best_hash, store,
            _block_marker: PhantomData,
        }
    }
    /// Returns the current best header.
    /// The unwrap is safe: `best_hash` always refers to a stored header.
    pub fn best(&self) -> &B {
        let best_hash = self.best_hash;
        self.fetch(best_hash).unwrap()
    }
    /// Looks a header up by hash.
    pub fn fetch(&self, hash: H) -> Option<&B> {
        self.store.fetch(hash)
    }
    /// Inserts `block`, promoting it to best if it ranks higher (by
    /// `Ord`) than the current best. Returns false — storing nothing —
    /// when the block has no parent hash or its parent is unknown.
    pub fn put(&mut self, block: B) -> bool {
        if block.parent_hash().is_none() || self.fetch(block.parent_hash().unwrap()).is_none() {
            return false;
        }
        let extern_hash = block.header_hash();
        let local_hash = self.best_hash;
        // Decide the new tip before `block` is moved into the store.
        let best_hash = if &block > self.best() {
            extern_hash
        } else {
            local_hash
        };
        self.store.put(block);
        self.best_hash = best_hash;
        return true;
    }
    /// Hashes of up to `len` most recent headers, starting at the best
    /// tip and walking parent links back toward genesis.
    pub fn last_hashes(&self, len: usize) -> Vec<H> {
        let mut ret = Vec::new();
        let mut current = self.best();
        'a: while ret.len() < len {
            ret.push(current.header_hash());
            match current.parent_hash() {
                Some(parent_hash) => {
                    current = self.fetch(parent_hash).unwrap();
                },
                None => break 'a,
            }
        }
        ret
    }
}
| true |
89ecf94f9cddcc9c508a59c1e35406dbf4e20fec
|
Rust
|
hugoduncan/austenite
|
/src/content_neg.rs
|
UTF-8
| 4,960 | 2.609375 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2015 Hugo Duncan
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Content Negotiation
use iron::headers::{Encoding,QualityItem};
use mime::{Mime,TopLevel,SubLevel};
use std::cmp::Ordering::Equal;
use log;
/// Compare a requested mime type x (with wild cards), to an available
/// mime type y, to see if they match
fn mime_match(x: &Mime, y: &Mime) -> bool {
    match x {
        // "*/*" matches anything.
        &Mime(TopLevel::Star,_,_) => true,
        // "type/*" matches on the top level only; parameters ignored.
        &Mime(ref tl,SubLevel::Star,_) => tl==&y.0,
        // Otherwise require full equality (including parameters).
        _ => x==y
    }
}
/// Return the best allowed content type for the request. The best
/// type is the first type in the Accept header that is available.
pub fn best_content_type(accept: &Vec<QualityItem<Mime>>,
                         avail: &Vec<Mime>) -> Option<Mime> {
    debug!("best_content_type {:?} in {:?}",accept, avail);
    // Sort a copy by descending quality; `unwrap_or(Equal)` keeps the
    // comparison total even if two qualities fail to compare.
    let mut accept = accept.clone();
    accept.sort_by(
        |x,y|
        x.quality.partial_cmp(&y.quality)
        .unwrap_or(Equal)
        .reverse());
    // First requested entry (wildcards allowed) matching any available
    // type wins.
    match accept.iter()
        .find(|m|
              avail.iter().find(|a| mime_match(&m.item, &a)).is_some()) {
        Some(qi) => Some(qi.item.clone()),
        None => None
    }
}
/// Compare a requested language type x (with wild cards), to an available
/// language type y, to see if they match. `"*"` matches any language.
fn language_match(x: &String, y: &String) -> bool {
    // Compare against the literal instead of allocating a fresh
    // String for the wildcard on every call.
    x.as_str() == "*" || x == y
}
/// Return the best allowed language.
pub fn best_language(mut accept: Vec<QualityItem<String>>,
                     avail: &Vec<String>) -> Option<String> {
    // `accept` is taken by value, so it can be sorted in place:
    // descending quality, NaN-safe via `unwrap_or(Equal)`.
    accept.sort_by(
        |x,y|
        x.quality.partial_cmp(&y.quality)
        .unwrap_or(Equal)
        .reverse());
    // First requested language (wildcard allowed) that is available.
    match accept.iter()
        .find(|m|
              avail.iter().find(|a| language_match(&m.item, &a)).is_some()) {
        Some(qi) => Some(qi.item.clone()),
        None => None
    }
}
/// Compare a requested charset type x (with wild cards), to an available
/// charset type y, to see if they match. `"*"` matches any charset.
fn charset_match(x: &String, y: &String) -> bool {
    // Compare against the literal instead of allocating a fresh
    // String for the wildcard on every call.
    x.as_str() == "*" || x == y
}
/// Return the best allowed charset.
pub fn best_charset(acceptv: &Vec<QualityItem<String>>,
                    avail: &Vec<String>) -> Option<String> {
    // Sort a copy by descending quality (NaN-safe), then take the
    // first requested charset that is actually available.
    let mut accept = acceptv.clone();
    accept.sort_by(
        |x,y|
        x.quality.partial_cmp(&y.quality)
        .unwrap_or(Equal)
        .reverse());
    match accept.iter()
        .find(|m|
              avail.iter().find(|a| charset_match(&m.item, &a)).is_some()) {
        Some(qi) => Some(qi.item.clone()),
        None => None
    }
}
/// Compare a requested encoding type x (with wild cards), to an available
/// encoding type y, to see if they match
fn encoding_match(x: &Encoding, y: &Encoding) -> bool {
    // NOTE(review): unlike the mime/language/charset matchers there is
    // no wildcard case here — confirm that is intended.
    x==y
}
/// Return the best allowed encoding.
pub fn best_encoding(accept: &Vec<QualityItem<Encoding>>,
                     avail: &Vec<Encoding>) -> Option<Encoding> {
    // Sort a copy by descending quality (NaN-safe), then take the
    // first requested encoding that is actually available.
    let mut accept = accept.clone();
    accept.sort_by(
        |x,y|
        x.quality.partial_cmp(&y.quality)
        .unwrap_or(Equal)
        .reverse());
    match accept.iter()
        .find(|m|
              avail.iter().find(|a| encoding_match(&m.item, &a)).is_some()) {
        Some(qi) => Some(qi.item.clone()),
        None => None
    }
}
#[cfg(test)]
mod tests {
    use super::mime_match;
    use mime::{Mime,TopLevel,SubLevel};
    // Covers the three match arms: full wildcard, top-level wildcard
    // (matching and non-matching), exact match, and sublevel mismatch.
    #[test]
    fn test_mime_match() {
        assert!(
            mime_match(&Mime(TopLevel::Star, SubLevel::Star, vec![]),
                       &Mime(TopLevel::Text, SubLevel::Plain, vec![])));
        assert!(
            !mime_match(&Mime(TopLevel::Application, SubLevel::Star, vec![]),
                        &Mime(TopLevel::Text, SubLevel::Plain, vec![])));
        assert!(
            mime_match(&Mime(TopLevel::Application, SubLevel::Star, vec![]),
                       &Mime(TopLevel::Application, SubLevel::Json, vec![])));
        assert!(
            mime_match(&Mime(TopLevel::Application, SubLevel::Json, vec![]),
                       &Mime(TopLevel::Application, SubLevel::Json, vec![])));
        assert!(
            !mime_match(&Mime(TopLevel::Application, SubLevel::Json, vec![]),
                        &Mime(TopLevel::Application,
                              SubLevel::Ext("yaml".to_string()),
                              vec![])));
    }
}
| true |
c4c9df09caafdc1572dda5ff8b73a565e8aac15a
|
Rust
|
irevoire/polaris
|
/src/ddns.rs
|
UTF-8
| 2,195 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use core::ops::Deref;
use diesel::prelude::*;
use log::{error, info};
use reqwest;
use serde::{Deserialize, Serialize};
use std::io;
use std::thread;
use std::time;
use crate::db::ddns_config;
use crate::db::{ConnectionSource, DB};
use crate::errors;
/// Row shape of the `ddns_config` table: YDNS credentials.
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Queryable, Serialize)]
#[table_name = "ddns_config"]
pub struct DDNSConfig {
    pub host: String,
    pub username: String,
    pub password: String,
}
/// Anything that can produce DDNS credentials (implemented by `DB`;
/// mockable in tests).
pub trait DDNSConfigSource {
    fn get_ddns_config(&self) -> errors::Result<DDNSConfig>;
}
impl DDNSConfigSource for DB {
    // Reads the credentials row from the database via Diesel.
    fn get_ddns_config(&self) -> errors::Result<DDNSConfig> {
        use self::ddns_config::dsl::*;
        let connection = self.get_connection();
        Ok(ddns_config
            .select((host, username, password))
            .get_result(connection.deref())?)
    }
}
/// Everything that can go wrong during a DDNS refresh.
#[derive(Debug)]
enum DDNSError {
    Internal(errors::Error),
    Io(io::Error),
    Reqwest(reqwest::Error),
    // Non-success HTTP status returned by the update endpoint.
    Update(reqwest::StatusCode),
}
// `?`-friendly conversions into DDNSError.
impl From<io::Error> for DDNSError {
    fn from(err: io::Error) -> DDNSError {
        DDNSError::Io(err)
    }
}
impl From<errors::Error> for DDNSError {
    fn from(err: errors::Error) -> DDNSError {
        DDNSError::Internal(err)
    }
}
impl From<reqwest::Error> for DDNSError {
    fn from(err: reqwest::Error) -> DDNSError {
        DDNSError::Reqwest(err)
    }
}
// YDNS API endpoint; the hostname to refresh is passed as a query parameter.
const DDNS_UPDATE_URL: &str = "https://ydns.io/api/v1/update/";

/// Performs one dynamic-DNS update against YDNS with the stored
/// credentials. An empty host/username is treated as "DDNS disabled"
/// and skipped without error.
fn update_my_ip<T>(config_source: &T) -> Result<(), DDNSError>
where
    T: DDNSConfigSource,
{
    let config = config_source.get_ddns_config()?;
    if config.host.is_empty() || config.username.is_empty() {
        info!("Skipping DDNS update because credentials are missing");
        return Ok(());
    }
    let full_url = format!("{}?host={}", DDNS_UPDATE_URL, &config.host);
    let client = reqwest::ClientBuilder::new().build()?;
    // The server infers the caller's public IP from the request itself.
    let res = client
        .get(full_url.as_str())
        .basic_auth(config.username, Some(config.password))
        .send()?;
    if !res.status().is_success() {
        return Err(DDNSError::Update(res.status()));
    }
    Ok(())
}
/// Blocking loop: refresh the DDNS record every 30 minutes, logging
/// (but otherwise ignoring) failures. Never returns.
pub fn run<T>(config_source: &T)
where
    T: DDNSConfigSource,
{
    loop {
        if let Err(e) = update_my_ip(config_source) {
            error!("Dynamic DNS update error: {:?}", e);
        }
        thread::sleep(time::Duration::from_secs(60 * 30));
    }
}
| true |
675f7eda6d91e5c0aaaad35dbc340d82cf7f7a73
|
Rust
|
scaspin/RTSS21-Artifact
|
/transactional-memory/txcell/tests/control_flow.rs
|
UTF-8
| 4,421 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
//! Tests that look at how control flow affects the conflict sets.
use std::sync::{Arc, Mutex};
use std::{thread, time};
use txcell::TxPtr;
// A simple branching example with one thread
// Will always pass
// NOTE(review): the `transaction { ... }` blocks throughout this file
// rely on a custom syntax extension; this is not plain rustc Rust.
#[test]
fn branch_one() {
    let a = Arc::new(TxPtr::new(2));
    let a_saved = a.clone();
    let t1 = thread::spawn(move || {
        transaction {
            if rand::random() {
                let t = a.borrow_mut();
                thread::sleep(time::Duration::from_millis(10));
                *t += 5;
            } else {
                let t = a.borrow_mut();
                thread::sleep(time::Duration::from_millis(10));
                *t += 3;
            }
        }
    });
    let _ = t1.join();
    // Initial 2 plus either branch increment: 2+5=7 or 2+3=5.
    assert!((*a_saved.borrow() == 7) | (*a_saved.borrow() == 5));
}
// A branching example with two threads
// Includes a 1ms delay so that interesting things happen
// Will always pass
#[test]
fn branch_two() {
    let a = Arc::new(TxPtr::new(0));
    let a_clone = a.clone();
    let a_saved = a.clone();
    let t1 = thread::spawn(move || {
        transaction {
            if rand::random() {
                let t = a.borrow_mut();
                thread::sleep(time::Duration::from_millis(1));
                *t += 5;
            } else {
                let t = a.borrow_mut();
                thread::sleep(time::Duration::from_millis(1));
                *t += 3;
            }
        }
    });
    let t2 = thread::spawn(move || {
        transaction {
            if rand::random() {
                let t = a_clone.borrow_mut();
                thread::sleep(time::Duration::from_millis(1));
                *t += 1;
            } else {
                let t = a_clone.borrow_mut();
                thread::sleep(time::Duration::from_millis(1));
                *t += 3;
            }
        }
    });
    let _ = t1.join();
    let _ = t2.join();
    // Possible totals of {5,3} + {1,3}: 5+1=6, 5+3=8, 3+1=4, 3+3=6.
    assert!((*a_saved.borrow() == 4) | (*a_saved.borrow() == 6) | (*a_saved.borrow() == 8));
}
/// Use the same TxPtr in every iteration, so each transaction should conflict.
/// 299 spawned threads each add 1 to an initial value of 1 -> n (300).
#[test]
fn loop_shared() {
    let rc = TxPtr::new(1);
    let a = Arc::new(rc);
    let a_saved = a.clone();
    let n = 300;
    let mut threads = vec![];
    for _ in 1..n {
        let a_clone = a.clone();
        threads.push(thread::spawn(move || {
            transaction {
                let rc_ref = a_clone.borrow_mut();
                *rc_ref += 1;
            }
        }));
    }
    // wait for all threads to finish
    for t in threads {
        let _ = t.join();
    }
    // NB: "developers, don't be stupid and write transactions in single-threaded code."
    // BCW: "there's a way to refactor to avoid this conflict."
    assert_eq!(*a_saved.borrow(), n);
}
/// Add 1 or 0 to both a TxPtr (inside a transaction) and a Mutex,
/// using the same random decision per thread, and assert that the two
/// counters agree at the end.
#[test]
fn compare_to_mutex() {
    let rc = TxPtr::new(0);
    let a = Arc::new(rc);
    let a_saved = a.clone();
    let mu = Mutex::new(0);
    let b = Arc::new(mu);
    let b_saved = b.clone();
    let n = 300;
    let mut threads = vec![];
    for _ in 1..n {
        let a_clone = a.clone();
        let b_clone = b.clone();
        threads.push(thread::spawn(move || {
            // One shared coin flip so both counters move in lockstep.
            let rand = rand::random::<bool>();
            // STM version
            transaction {
                let a_ref = a_clone.borrow_mut();
                if rand {
                    *a_ref += 1;
                }
            }
            // lock version
            let mut b_ref = b_clone.lock().unwrap();
            if rand {
                *b_ref += 1;
            }
        }));
    }
    for t in threads {
        let _ = t.join();
    }
    assert_eq!(*a_saved.borrow(), *b_saved.lock().unwrap());
}
// Verifies that transactions still work when the TxPtr is mutated
// through an indirect call (`baz`) before the local transaction runs.
#[test]
fn test_indirect() {
    foo();
    bar();
}
// Starts at 2; baz adds 1 in its own transaction, then +2 here -> 5.
fn foo() {
    let a = Arc::new(TxPtr::new(2));
    let a_clone = a.clone();
    let a_saved = a.clone();
    baz(a);
    transaction {
        let r = a_clone.borrow_mut();
        *r += 2;
    }
    assert_eq!(*a_saved.borrow(), 5);
}
// Starts at 1; baz adds 1, then +2 here -> 4.
fn bar() {
    let b = Arc::new(TxPtr::new(1));
    let b_clone = b.clone();
    let b_saved = b.clone();
    baz(b);
    transaction {
        let r = b_clone.borrow_mut();
        *r += 2;
    }
    assert_eq!(*b_saved.borrow(), 4);
}
// Increments the shared cell by 1 inside its own transaction.
fn baz(c: Arc<TxPtr<i32>>) {
    transaction {
        let r = c.borrow_mut();
        *r += 1;
    }
}
| true |
a20ec161f241f47ac75df57448410dbdd8d916c5
|
Rust
|
vexyz/vexyz_math
|
/src/f32.rs
|
UTF-8
| 1,578 | 3.484375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std;
// Conversion factors, computed in f64 for accuracy and narrowed to f32.
static DEG_TO_RAD_FACTOR: f32 = (std::f64::consts::PI / 180.0) as f32;
static RAD_TO_DEG_FACTOR: f32 = (180.0 / std::f64::consts::PI) as f32;

/// Floating-point helpers: angle-unit conversion and tolerant comparison.
pub trait FloatOps {
    /// Converts a value in degrees to radians.
    fn to_rad(self) -> f32;
    /// Converts a value in radians to degrees.
    fn to_deg(self) -> f32;
    /// Returns true when `self` and `rhs` differ by less than `eps`
    /// in absolute value.
    fn approx_equal(&self, rhs: f32, eps: f32) -> bool;
}

impl FloatOps for f32 {
    fn to_rad(self) -> f32 {
        DEG_TO_RAD_FACTOR * self
    }

    fn to_deg(self) -> f32 {
        RAD_TO_DEG_FACTOR * self
    }

    fn approx_equal(&self, rhs: f32, eps: f32) -> bool {
        let delta = self - rhs;
        delta.abs() < eps
    }
}
| true |
cafd1cff8ee24a777f0eafddada864f29ddfccdb
|
Rust
|
nksaraf/prisma-engines
|
/migration-engine/cli/src/commands/tests.rs
|
UTF-8
| 7,244 | 2.65625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use migration_connector::ConnectorError;
use structopt::StructOpt;
use test_macros::test_connector;
use test_setup::{sqlite_test_url, BitFlags, Tags, TestApiArgs};
use url::Url;
use user_facing_errors::{common::DatabaseDoesNotExist, UserFacingError};
/// Minimal harness for exercising the migration-engine CLI against a
/// real database provisioned for the current test connector.
struct TestApi {
    connection_string: String,
    rt: tokio::runtime::Runtime,
}
impl TestApi {
    /// Provisions the backing database (Postgres/MySQL) or builds a
    /// SQLite file URL, depending on the connector tags for this test.
    fn new(args: TestApiArgs) -> Self {
        let rt = test_setup::runtime::test_tokio_runtime();
        let connection_string = if args.tags().contains(Tags::Postgres) {
            rt.block_on(args.create_postgres_database()).2
        } else if args.tags().contains(Tags::Mysql) {
            rt.block_on(args.create_mysql_database()).1
        } else if args.tags().contains(Tags::Sqlite) {
            sqlite_test_url(args.test_function_name())
        } else {
            unreachable!()
        };
        TestApi { connection_string, rt }
    }
    // Runs a CLI subcommand (args as they would appear on the command
    // line) and returns its textual result.
    fn run(&self, args: &[&str]) -> Result<String, ConnectorError> {
        let cli = super::Cli::from_iter(std::iter::once(&"migration-engine-cli-test").chain(args.iter()))
;
        self.rt.block_on(cli.run_inner())
    }
    // Runs a full `migration-engine cli ...` invocation that is
    // expected to fail and returns the resulting error.
    fn get_cli_error(&self, cli_args: &[&str]) -> ConnectorError {
        let matches = crate::MigrationEngineCli::from_iter(cli_args.iter());
        let cli_command = matches.cli_subcommand.expect("cli subcommand is passed");
        self.rt.block_on(cli_command.unwrap_cli().run_inner()).unwrap_err()
    }
}
// can-connect-to-database behavior for valid and invalid MySQL /
// Postgres connection strings (both `postgres:` and `postgresql:`
// scheme spellings).
#[test_connector(tags(Mysql))]
fn test_connecting_with_a_working_mysql_connection_string(api: TestApi) {
    let result = api
        .run(&["--datasource", &api.connection_string, "can-connect-to-database"])
        .unwrap();
    assert_eq!(result, "Connection successful");
}
#[test_connector(tags(Mysql))]
fn test_connecting_with_a_non_working_mysql_connection_string(api: TestApi) {
    // Point at a database that does not exist -> P1003.
    let mut non_existing_url: url::Url = api.connection_string.parse().unwrap();
    non_existing_url.set_path("this_does_not_exist");
    let err = api
        .run(&["--datasource", &non_existing_url.to_string(), "can-connect-to-database"])
        .unwrap_err();
    assert_eq!("P1003", err.error_code().unwrap());
}
#[test_connector(tags(Postgres))]
fn test_connecting_with_a_working_postgres_connection_string(api: TestApi) {
    // Force the `postgresql:` scheme spelling.
    let conn_string = if api.connection_string.starts_with("postgres:") {
        api.connection_string.replacen("postgres:", "postgresql:", 1)
    } else {
        api.connection_string.clone()
    };
    let result = api
        .run(&["--datasource", &conn_string, "can-connect-to-database"])
        .unwrap();
    assert_eq!(result, "Connection successful");
}
// Note: not redundant with previous test because of the different URL scheme.
#[test_connector(tags(Postgres))]
fn test_connecting_with_a_working_postgresql_connection_string(api: TestApi) {
    // Force the `postgres:` scheme spelling.
    let conn_string = if api.connection_string.starts_with("postgresql:") {
        api.connection_string.replacen("postgresql:", "postgres:", 1)
    } else {
        api.connection_string.clone()
    };
    let result = api
        .run(&["--datasource", &conn_string, "can-connect-to-database"])
        .unwrap();
    assert_eq!(result, "Connection successful");
}
#[test_connector(tags(Postgres))]
fn test_connecting_with_a_non_working_psql_connection_string(api: TestApi) {
    let mut url: url::Url = api.connection_string.parse().unwrap();
    url.set_path("this_does_not_exist");
    let err = api
        .run(&["--datasource", &url.to_string(), "can-connect-to-database"])
        .unwrap_err();
    assert_eq!("P1003", err.error_code().unwrap());
}
// create-database / drop-database round trips for server databases
// and SQLite files.
#[test_connector(tags(Postgres, Mysql))]
fn test_create_database(api: TestApi) {
    // Drop first so create starts from a clean slate.
    api.run(&["--datasource", &api.connection_string, "drop-database"])
        .unwrap();
    let res = api
        .run(&["--datasource", &api.connection_string, "create-database"])
        .unwrap();
    assert_eq!("Database 'test_create_database\' was successfully created.", res);
    let res = api.run(&["--datasource", &api.connection_string, "can-connect-to-database"]);
    assert_eq!("Connection successful", res.as_ref().unwrap());
}
#[test_connector(tags(Sqlite))]
fn test_create_sqlite_database(api: TestApi) {
    // Intermediate directories must be created along with the file.
    let base_dir = tempfile::tempdir().unwrap();
    let sqlite_path = base_dir
        .path()
        .join("doesntexist/either")
        .join("test_create_sqlite_database.db");
    assert!(!sqlite_path.exists());
    let url = format!("file:{}", sqlite_path.to_string_lossy());
    let res = api.run(&["--datasource", &url, "create-database"]);
    let msg = res.as_ref().unwrap();
    assert!(msg.contains("success"));
    assert!(msg.contains("test_create_sqlite_database.db"));
    assert!(sqlite_path.exists());
}
#[test_connector(tags(Sqlite))]
fn test_drop_sqlite_database(api: TestApi) {
    let base_dir = tempfile::tempdir().unwrap();
    let sqlite_path = base_dir.path().join("test.db");
    let url = format!("file:{}", sqlite_path.to_string_lossy());
    api.run(&["--datasource", &url, "create-database"]).unwrap();
    api.run(&["--datasource", &url, "can-connect-to-database"]).unwrap();
    api.run(&["--datasource", &url, "drop-database"]).unwrap();
    // drop-database must delete the file itself.
    assert!(!sqlite_path.exists());
}
#[test_connector(tags(Mysql, Postgres))]
fn test_drop_database(api: TestApi) {
    api.run(&["--datasource", &api.connection_string, "drop-database"])
        .unwrap();
    // After dropping, connecting must fail with "database does not exist".
    let err = api
        .run(&["--datasource", &api.connection_string, "can-connect-to-database"])
        .unwrap_err();
    assert_eq!(err.error_code(), Some(DatabaseDoesNotExist::ERROR_CODE));
}
// Error-mapping tests: CLI failures must surface user-facing error
// codes and messages, not raw driver errors.
#[test_connector(tags(Postgres))]
fn database_already_exists_must_return_a_proper_error(api: TestApi) {
    // The harness already created this database, so create-database -> P1009.
    let error = api.get_cli_error(&[
        "migration-engine",
        "cli",
        "--datasource",
        &api.connection_string,
        "create-database",
    ]);
    let (host, port) = {
        let url = Url::parse(&api.connection_string).unwrap();
        (url.host().unwrap().to_string(), url.port().unwrap())
    };
    assert_eq!(error.error_code(), Some("P1009"));
    assert_eq!(error.to_string(), format!("Database `database_already_exists_must_return_a_proper_error` already exists on the database server at `{host}:{port}`\n", host = host, port = port));
}
#[test_connector(tags(Postgres))]
fn bad_postgres_url_must_return_a_good_error(api: TestApi) {
    // Backtick in the port makes the URL unparseable.
    let url = "postgresql://postgres:prisma@localhost:543`/mydb?schema=public";
    let error = api.get_cli_error(&["migration-engine", "cli", "--datasource", url, "create-database"]);
    assert_eq!(
        error.to_string(),
        "Error parsing connection string: invalid port number in database URL\n"
    );
}
#[test_connector(tags(Postgres))]
fn tls_errors_must_be_mapped_in_the_cli(api: TestApi) {
    // Demand strict TLS from a server that does not speak it -> P1011.
    let url = format!("{}&sslmode=require&sslaccept=strict", api.connection_string);
    let error = api.get_cli_error(&[
        "migration-engine",
        "cli",
        "--datasource",
        &url,
        "can-connect-to-database",
    ]);
    assert_eq!(error.error_code(), Some("P1011"));
    assert_eq!(
        error.to_string(),
        "Error opening a TLS connection: error performing TLS handshake: server does not support TLS\n"
    );
}
| true |
0977b28798af72a24e118b0727ccbf621df6ccbe
|
Rust
|
quodlibetor/field-by-field
|
/field-by-field-derive/tests/derive-enum-struct.rs
|
UTF-8
| 1,838 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
/// Test crate for derive(FieldByField) on a struct-like enum
extern crate field_by_field;
#[macro_use]
extern crate field_by_field_derive;
use field_by_field::EqualFieldByField;
#[derive(FieldByField, Debug, Clone)]
enum StructEnum {
One {
two: i8,
flip: String,
},
Two {
a: u16,
b: u16,
}
}
fn eq() -> (StructEnum, StructEnum) {
(StructEnum::One { two: 2, flip: "Flop".into() },
StructEnum::One { two: 2, flip: "Flop".into() })
}
/// Same variant, but the `flip` field differs.
fn not_eq() -> (StructEnum, StructEnum) {
    let left = StructEnum::One { two: 2, flip: "Flop".into() };
    let right = StructEnum::One { two: 2, flip: "Blizz".into() };
    (left, right)
}
/// Values of two different variants entirely.
fn not_eq_multivar() -> (StructEnum, StructEnum) {
    let left = StructEnum::One { two: 2, flip: "Flop".into() };
    let right = StructEnum::Two { a: 1, b: 2 };
    (left, right)
}
#[test]
fn list_allows_same() {
    // Identical values must produce an empty difference list.
    let (a, b) = eq();
    assert_eq!(a.fields_not_equal(&b).len(), 0);
}
#[test]
fn assert_allows_same() {
    // Identical values must not panic.
    let (a, b) = eq();
    a.assert_equal_field_by_field(&b);
}
#[test]
fn list_catches_differences() {
    // Only the differing field (`flip`) should be reported.
    let (a, b) = not_eq();
    let names: Vec<_> = a
        .fields_not_equal(&b)
        .into_iter()
        .map(|ue| ue.field_name)
        .collect();
    assert_eq!(names, vec!["flip"]);
}
#[test]
#[should_panic]
fn assert_catches_differences() {
    // A differing field must trigger the assertion panic.
    let (a, b) = not_eq();
    a.assert_equal_field_by_field(&b);
}
#[test]
fn list_catches_differences_multivar() {
    // Different variants are reported as a single variant-level mismatch.
    let (a, b) = not_eq_multivar();
    let names: Vec<_> = a
        .fields_not_equal(&b)
        .into_iter()
        .map(|ue| ue.field_name)
        .collect();
    assert_eq!(names, vec!["StructEnum::One"]);
}
#[test]
#[should_panic]
fn assert_catches_differences_multivar() {
    // Different variants must trigger the assertion panic.
    let (a, b) = not_eq_multivar();
    a.assert_equal_field_by_field(&b);
}
| true |
6e88cffc80e3879b5c14733e897547861bd33bc4
|
Rust
|
wmmxk/OOP_Rust
|
/Discussions/D1/variant_integer/variant_integer.rs
|
UTF-8
| 764 | 3.53125 | 4 |
[] |
no_license
|
// Each variant of an enum type has a unique and constant integral discriminator value. If no explicit discriminator is specified for a variant, the value defaults to the value of the previous variant plus one. If the first variant does not have a discriminator, it defaults to 0.
// you can cast a variant to an integer, which can be used as index when you refer to an element of a vector
// Demonstrates discriminator assignment: an unspecified first variant is 0,
// each following variant is previous + 1, and an explicit value restarts
// the sequence from there.
#[derive(Debug)]
enum Direction {
    North,    // 0 (implicit default for the first variant)
    East,     // 1 (previous + 1)
    South=10, // explicit discriminator
    // South = 1, you will see an error. enum already has '1isize'
    West,     // 11 (continues from the explicit 10)
}
fn main() {
    // Casting a variant with `as` exposes its integral discriminator.
    let north = Direction::North as u16;
    println!("{:?} => {}", Direction::North, north);
    let west = Direction::West as u16;
    println!("{:?} => {}", Direction::West, west);
    let south = Direction::South as u16;
    println!("{:?} => {}", Direction::South, south);
}
| true |
8ed0ccfb46b093ca5685412488a8ea836858060d
|
Rust
|
hgzimmerman/visualization
|
/common/src/collinear.rs
|
UTF-8
| 2,851 | 3.265625 | 3 |
[] |
no_license
|
use nannou::geom::Point2;
use nannou::prelude::*;
// TODO refine this into a custom iterator.
// As it is it is less perfect than the alternative, and still has to collect into a vec.
// An iterator solution could check if its internal iterator is none, as well as its prev.
/// Will remove the middle points of groups of 3 collinear points that appear in linear order along the curve.
///
/// Iterator-based variant of `condense_collinear` (see the TODO above about
/// turning this into a proper custom iterator).
pub fn condense_collinear_iter(pts: Vec<Point2>) -> Vec<Point2> {
    // With fewer than three points there is no middle point to drop, and the
    // tail fix-up below would index out of bounds (filtered len < 2) or push
    // a fabricated Point2::default() for empty input. Mirrors the guard in
    // `condense_collinear`.
    if pts.len() < 3 {
        return pts;
    }
    let mut prev_1 = None; // Most recent
    let mut prev_2 = None; // Second most recent
    let mut pts: Vec<Point2> = pts.into_iter()
        .filter_map(|pt| {
            // Emit the previous point unless it is the middle of a
            // collinear triple; the current point is decided next round.
            let ret = if let Some(prev1) = prev_1 {
                if let Some(prev2) = prev_2 {
                    if are_collinear(prev2, prev1, pt) {
                        None
                    } else {
                        Some(prev1)
                    }
                } else {
                    Some(pt) // Depending on the curve, this can be either.
                }
            } else {
                Some(pt)
            };
            // End
            prev_2 = prev_1;
            prev_1 = Some(pt);
            ret
        })
        .collect();
    // Handle the last element, which needs to be added.
    // The current last point may need to be discarded if the previous and the last are collinear with it.
    // The len < 3 guard above guarantees at least two points survive the
    // filter, so the indexing below cannot underflow.
    let prev = prev_1.unwrap_or_default();
    if are_collinear(pts[pts.len() - 2], pts[pts.len() - 1], prev) {
        pts.pop();
    }
    pts.push(prev);
    pts
}
/// Will remove the middle points of groups of 3 collinear points that appear in linear order along the curve.
pub fn condense_collinear(pts: Vec<Point2>) -> Vec<Point2> {
    if pts.len() < 3 {
        return pts
    } else {
        // `ret` holds confirmed points; `candidate` is the point still
        // awaiting a collinearity verdict against its neighbours.
        let mut ret = vec![pts[0]];
        let mut candidate = pts[1];
        for i in 2..pts.len() {
            if !are_collinear(ret[ret.len() - 1], candidate, pts[i]) {
                ret.push(candidate)
            }
            candidate = pts[i]
        }
        // Handle the last element, which needs to be added.
        // The current last point may need to be discarded if the previous and the last are collinear with it.
        // Guard: when every interior point was collinear, `ret` still holds
        // only the first point and `ret.len() - 2` would underflow (panic in
        // debug builds). With a single confirmed point there is nothing to pop.
        if ret.len() >= 2 && are_collinear(ret[ret.len() - 2], ret[ret.len() - 1], candidate) {
            ret.pop();
        }
        ret.push(candidate);
        ret
    }
}
/// Returns true when the three points are (approximately) collinear.
///
/// Computes twice the signed area of the triangle (pt1, pt2, pt3) via the
/// shoelace formula; the area is zero exactly when the points lie on one
/// line. The tolerance is deliberately far larger than `f32::epsilon()`.
pub fn are_collinear(pt1: Point2, pt2: Point2, pt3: Point2) -> bool {
    let x2_area =
        pt1.x * (pt2.y - pt3.y)
            + pt2.x * (pt3.y - pt1.y)
            + pt3.x * (pt1.y - pt2.y);
    // Epsilon is apparently too small for some errors made when adjusting angles
    // Lowering the bar here allows some points that should be collinear to be calculated as such.
    x2_area.abs() < f32::epsilon() * 100000.0
}
| true |
10b8a15b888e4320e98e2be1defc43ccbd84dd64
|
Rust
|
hhamana/rusty_life
|
/src/draw.rs
|
UTF-8
| 623 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
use super::grid;
/// prints the points of a single grid as a string to the standard output (console)
pub fn to_sysout(grid: grid::Grid) {
    const CHAR_ALIVE: &'static str = "O";
    const CHAR_DEAD: &'static str = " ";
    const NEWLINE: &'static str = "\n";
    const CLEAR: &'static str = "\x1B[2J";
    // ANSI escape clears the terminal before drawing the next frame.
    println!("{}", CLEAR);
    for row in grid.points {
        for cell in row {
            let glyph = if cell.alive { CHAR_ALIVE } else { CHAR_DEAD };
            print!("{}", glyph);
        }
        print!("{}", NEWLINE);
    }
}
| true |
026d5727760bbc03df3cf2866a83a12c17c9f0bd
|
Rust
|
OzakiJunpei/memalloc
|
/src/buddy.rs
|
UTF-8
| 6,895 | 3.0625 | 3 |
[
"MIT"
] |
permissive
|
// Let 'h' be the depth of a complete binary tree,
// then the number of nodes is
// 2^(h+1) - 1 = (1 << (h + 1)) - 1
// .
//
// When h = 10,
// 2^10 * min_size
// is maximum bytes of the buddy memory allocator.
//
// u: unused
// x: inner node
// L: used leaf node
// (number) indicates the index of a node
// x(0)
// / \
// x(1) L(2)
// / \
// u(3) L(4) u(5) u(6)
//
// encoding rule
// 0b00: unused
// 0b01: inner node
// 0b10: used leaf
//
// above tree can be encoded as
// 01 01 10 00 10 00 00
// x(0) x(1) L(2) u(3) L(4) u(5) u(6)
#[cfg(feature = "buddy_32m")]
const MAX_DEPTH: usize = 9; // depth of tree
#[cfg(feature = "buddy_64m")]
const MAX_DEPTH: usize = 10; // depth of tree
#[cfg(feature = "buddy_128m")]
const MAX_DEPTH: usize = 11; // depth of tree
#[cfg(feature = "buddy_256m")]
const MAX_DEPTH: usize = 12; // depth of tree
#[cfg(feature = "buddy_512m")]
const MAX_DEPTH: usize = 13; // depth of tree
#[cfg(feature = "buddy_1g")]
const MAX_DEPTH: usize = 14; // depth of tree
#[cfg(feature = "buddy_2g")]
const MAX_DEPTH: usize = 15; // depth of tree
#[cfg(feature = "buddy_4g")]
const MAX_DEPTH: usize = 16; // depth of tree
#[cfg(feature = "buddy_8g")]
const MAX_DEPTH: usize = 17; // depth of tree
#[cfg(feature = "buddy_16g")]
const MAX_DEPTH: usize = 18; // depth of tree
#[cfg(feature = "buddy_32g")]
const MAX_DEPTH: usize = 19; // depth of tree
#[cfg(feature = "buddy_64g")]
const MAX_DEPTH: usize = 19; // depth of tree
#[cfg(feature = "buddy_128g")]
const MAX_DEPTH: usize = 20; // depth of tree
#[cfg(feature = "buddy_256g")]
const MAX_DEPTH: usize = 21; // depth of tree
#[cfg(feature = "buddy_512g")]
const MAX_DEPTH: usize = 22; // depth of tree
#[cfg(feature = "buddy_1t")]
const MAX_DEPTH: usize = 23; // depth of tree
#[cfg(feature = "buddy_2t")]
const MAX_DEPTH: usize = 24; // depth of tree
#[cfg(feature = "buddy_4t")]
const MAX_DEPTH: usize = 25; // depth of tree
#[cfg(feature = "buddy_8t")]
const MAX_DEPTH: usize = 26; // depth of tree
const NUM_NODES: usize = (1 << (MAX_DEPTH + 1)) - 1; // the number of nodes
const NUM_NODES32: usize = (NUM_NODES >> 5) + 1; // #nodes / 32 + 1
const TAG_UNUSED: u64 = 0;
const TAG_INNER: u64 = 1;
const TAG_USED_LEAF: u64 = 2;
/// Buddy-system allocator over a fixed memory region, tracked by a complete
/// binary tree whose 2-bit node tags are packed into `bitmap` (32 tags per
/// u64; see the encoding table at the top of this file).
pub(crate) struct BuddyAlloc {
    min_size: usize, // smallest allocatable block size (one leaf)
    start: usize, // start address
    bitmap: [u64; NUM_NODES32], // succinct structure of the tree
}
/// Per-node state, stored as 2 bits per node in `BuddyAlloc::bitmap`.
enum Tag {
    Unused = TAG_UNUSED as isize,     // 0b00: block free
    Inner = TAG_INNER as isize,       // 0b01: block split into two buddies
    UsedLeaf = TAG_USED_LEAF as isize, // 0b10: block allocated as a whole
}
impl BuddyAlloc {
    /// Creates an allocator managing `(1 << MAX_DEPTH) * min_size` bytes
    /// starting at address `start`; the whole region begins unused.
    pub(crate) fn new(min_size: usize, start: usize) -> BuddyAlloc {
        BuddyAlloc {
            min_size: min_size,
            start: start,
            bitmap: [0; NUM_NODES32],
        }
    }
    /// Allocates a block of at least `size` bytes, or `None` if no block fits.
    pub(crate) fn mem_alloc(&mut self, size: usize) -> Option<*mut u8> {
        self.find_mem(size, (1 << MAX_DEPTH) * self.min_size, 0, 0)
    }
    /// Frees a pointer previously returned by `mem_alloc`; panics when the
    /// address was not currently allocated.
    pub(crate) fn mem_free(&mut self, addr: *mut u8) {
        self.release_mem(addr as usize, (1 << MAX_DEPTH) * self.min_size, 0, 0)
    }
    /// Reads the 2-bit tag of tree node `idx` out of the packed bitmap.
    fn get_tag(&self, idx: usize) -> Tag {
        let i = idx >> 5; // div by 32
        let j = idx & 0b11111;
        match (self.bitmap[i] >> (j * 2)) & 0b11 {
            TAG_UNUSED => Tag::Unused,
            TAG_INNER => Tag::Inner,
            TAG_USED_LEAF => Tag::UsedLeaf,
            _ => panic!("unknown tag"),
        }
    }
    /// Overwrites the 2-bit tag of tree node `idx`.
    fn set_tag(&mut self, idx: usize, tag: Tag) {
        let i = idx >> 5; // div by 32
        let j = idx & 0b11111;
        let mask = 0b11 << (j * 2);
        let val = self.bitmap[i] & !mask;
        self.bitmap[i] = val | ((tag as u64) << (j * 2));
    }
    /// Node index for (`depth`, `offset`) in the implicit breadth-first
    /// layout: row `depth` starts at 2^depth - 1.
    fn get_idx(depth: usize, offset: usize) -> usize {
        if depth == 0 {
            0
        } else {
            (1 << depth) - 1 + offset
        }
    }
    /// Recursive first-fit descent: splits unused blocks while the half
    /// still satisfies the request, otherwise marks the block as a used
    /// leaf and returns its address.
    fn find_mem(
        &mut self,
        req: usize, // requested bytes
        bytes: usize, // total bytes of this block
        depth: usize,
        offset: usize, // offset of current node in the depth
    ) -> Option<*mut u8> {
        if req > bytes || depth > MAX_DEPTH {
            return None;
        }
        let idx = BuddyAlloc::get_idx(depth, offset);
        match self.get_tag(idx) {
            Tag::UsedLeaf => None,
            Tag::Unused => {
                let next_bytes = bytes >> 1;
                if next_bytes >= req && depth < MAX_DEPTH {
                    // divide
                    self.set_tag(idx, Tag::Inner);
                    self.find_mem(req, next_bytes, depth + 1, offset * 2)
                } else {
                    self.set_tag(idx, Tag::UsedLeaf);
                    Some((self.start + bytes * offset) as *mut u8)
                }
            }
            // Inner node: try the left buddy first, then the right.
            Tag::Inner => match self.find_mem(req, bytes >> 1, depth + 1, offset * 2) {
                None => self.find_mem(req, bytes >> 1, depth + 1, offset * 2 + 1),
                ret => ret,
            },
        }
    }
    /// Recursive free: walks down to the leaf owning `addr`, clears it, and
    /// merges sibling blocks back together on the way up when both are free.
    fn release_mem(&mut self, addr: usize, bytes: usize, depth: usize, offset: usize) {
        let idx = BuddyAlloc::get_idx(depth, offset);
        match self.get_tag(idx) {
            Tag::Unused => {
                panic!("freed unused memory");
            }
            Tag::UsedLeaf => {
                let target = self.start + bytes * offset;
                if target == addr {
                    self.set_tag(idx, Tag::Unused);
                } else {
                    panic!("freed invalid address");
                }
            }
            Tag::Inner => {
                // Descend into whichever half contains the address.
                let pivot = self.start + bytes * offset + (bytes >> 1);
                if addr < pivot {
                    self.release_mem(addr, bytes >> 1, depth + 1, offset * 2);
                } else {
                    self.release_mem(addr, bytes >> 1, depth + 1, offset * 2 + 1);
                }
                // combine buddy if both blocks are unused
                let left = BuddyAlloc::get_idx(depth + 1, offset * 2);
                let right = BuddyAlloc::get_idx(depth + 1, offset * 2 + 1);
                match self.get_tag(left) {
                    Tag::Unused => match self.get_tag(right) {
                        Tag::Unused => {
                            self.set_tag(idx, Tag::Unused);
                        }
                        _ => (),
                    },
                    _ => (),
                }
            }
        }
    }
    // pub fn print(&self) {
    //     for i in 0..(1 << (MAX_DEPTH + 1)) - 1 {
    //         uart::puts("idx = ");
    //         uart::decimal(i as u64);
    //         uart::puts(", tag = ");
    //         match self.get_tag(i) {
    //             Tag::Unused => uart::puts("unused\n"),
    //             Tag::Inner => uart::puts("inner\n"),
    //             Tag::UsedLeaf => uart::puts("used leaf\n"),
    //         }
    //     }
    // }
}
| true |
91fa3f41bb31dab5a585d585ab86f06748c2ec1a
|
Rust
|
lorenmh/rust-raytrace
|
/src/shapes/axes.rs
|
UTF-8
| 1,349 | 2.78125 | 3 |
[] |
no_license
|
use nalgebra as na;
/// World-axes helper object: pairs physics state with a renderer holding
/// the combined x/y/z axis mesh.
pub struct Axes {
    pub phys: crate::physics::Physics,
    pub gfx: crate::gfx::render::Renderer,
}
/// Builds the three coordinate axes as long, thin rectangles (x red, y green,
/// z blue) merged into a single mesh.
pub fn new() -> Axes {
    // NOTE(review): the per-rectangle color closures below appear to be
    // superseded by the combined closure handed to render::new — confirm
    // against the rectangle/render APIs. The `i` parameter is unused here.
    let x = crate::shapes::rectangle::new(
        0.0,
        0.0,
        0.0,
        1000.0,
        0.2,
        |i| { [1.0, 0.0, 0.0] },
    );
    let y = crate::shapes::rectangle::new(
        0.0,
        0.0,
        0.0,
        0.2,
        1000.0,
    |i| { [0.0, 1.0, 0.0] },
    );
    let mut z = crate::shapes::rectangle::new(
        0.0,
        0.0,
        0.0,
        0.2,
        1000.0,
        |i| { [0.0, 0.0, 1.0] },
    );
    // Rotate the third rectangle -90° about x so it spans the z axis.
    z.phys.rot = na::Vector3::x() * -std::f32::consts::FRAC_PI_2;
    // Concatenate the three rectangle meshes into one triangle list.
    let mut mesh: std::vec::Vec<crate::gfx::Triangle> = x.vertices();
    mesh.extend(y.vertices());
    mesh.extend(z.vertices());
    Axes{
        phys: crate::physics::new(0.0, 0.0, 0.0),
        gfx: crate::gfx::render::new(
            1.0,
            mesh,
            // Color by vertex index: the closure assumes each rectangle
            // contributes 6 vertices (indices 0-5 red, 6-11 green, rest
            // blue) — TODO confirm against rectangle::vertices().
            |i| {
                if (i / 6) == 0 {
                    [1.0, 0.0, 0.0]
                } else if (i / 12) == 0 {
                    [0.0, 1.0, 0.0]
                } else {
                    [0.0, 0.0, 1.0]
                }
            },
        ),
    }
}
impl Axes {
    /// Draws the axes mesh using this object's physics state.
    pub fn render(&self, params: &crate::gfx::render::Params) {
        self.gfx.render(&self.phys, params);
    }
}
| true |
4720dcd6744a2666bff68ae1f078df40590646d3
|
Rust
|
nanoqsh/gni
|
/src/window.rs
|
UTF-8
| 2,863 | 2.625 | 3 |
[] |
no_license
|
// Shorthand for a current OpenGL context bound to a winit window.
type Context = glutin::ContextWrapper<glutin::PossiblyCurrent, glutin::window::Window>;
type EventLoop = glutin::event_loop::EventLoop<()>;
/// Owns the GL context and the event loop that drives it (see `run`).
pub struct Window {
    context: Context,
    event_loop: EventLoop,
}
impl Window {
    /// Creates a resizable window with the given title and a vsynced
    /// OpenGL 3.3 context made current on this thread.
    pub fn new<T>(title: T) -> Self
    where
        T: Into<String>,
    {
        let event_loop = glutin::event_loop::EventLoop::new();
        let window_builder = glutin::window::WindowBuilder::new()
            .with_title(title)
            .with_resizable(true);
        // SAFETY-ish: glutin marks make_current unsafe because only one
        // context may be current per thread; we create a single context here.
        let context = unsafe {
            glutin::ContextBuilder::new()
                .with_vsync(true)
                .with_gl(glutin::GlRequest::Specific(glutin::Api::OpenGl, (3, 3)))
                .build_windowed(window_builder, &event_loop)
                .unwrap()
                .make_current()
                .unwrap()
        };
        Self {
            context,
            event_loop,
        }
    }
    /// Borrow of the raw glutin context (e.g. for loading GL symbols).
    pub fn context(&self) -> &glutin::Context<glutin::PossiblyCurrent> {
        self.context.context()
    }
    /// Runs the event loop forever (never returns), redrawing via `ev.draw()`
    /// at the requested frame rate; `fps == 0` means uncapped polling.
    pub fn run<E>(self, mut ev: E, fps: u32) -> !
    where
        E: crate::event::Event + 'static,
    {
        use glutin::{
            event::{Event, StartCause, WindowEvent},
            event_loop::ControlFlow,
        };
        use std::time::{Duration, Instant};
        // Frame period in microseconds; 0 selects ControlFlow::Poll below.
        let micros = if fps == 0 { 0 } else { 1_000_000 / fps as u64 };
        let context = self.context;
        self.event_loop.run(move |event, _, flow| {
            match event {
                Event::WindowEvent { event, .. } => {
                    // Window events return early so they do not reset the
                    // frame timer set at the bottom of the closure.
                    return match event {
                        WindowEvent::Resized(size) => {
                            context.resize(size);
                            ev.resize(size.into());
                        }
                        WindowEvent::CloseRequested => {
                            *flow = ControlFlow::Exit;
                        }
                        _ => (),
                    }
                }
                Event::NewEvents(cause) => match cause {
                    // Timer fired (or polling): render one frame.
                    StartCause::ResumeTimeReached { .. } | StartCause::Poll => {
                        ev.draw();
                        context.swap_buffers().unwrap();
                    }
                    // Woken early: re-arm the same deadline and bail out.
                    // NOTE(review): `requested_resume` is unwrap()ed — verify
                    // it is always Some when WaitUntil was requested.
                    StartCause::WaitCancelled {
                        requested_resume, ..
                    } => {
                        let instant = requested_resume.unwrap();
                        *flow = ControlFlow::WaitUntil(instant);
                        return;
                    }
                    StartCause::Init => (),
                },
                _ => return,
            }
            // Schedule the next frame.
            *flow = if micros == 0 {
                ControlFlow::Poll
            } else {
                ControlFlow::WaitUntil(Instant::now() + Duration::from_micros(micros))
            };
        })
    }
}
| true |
8690f00d9539df63d08ccbfbb5f8356a5c23313f
|
Rust
|
hubauth/authorized_keys
|
/src/openssh/v2/parse/mod.rs
|
UTF-8
| 1,497 | 2.5625 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
mod atoms;
mod full;
mod mapped;
mod parts;
use super::models::{KeyAuthorization, KeysFile, KeysFileLine};
use std::str::FromStr;
impl FromStr for KeyAuthorization {
    type Err = String;
    /// Parses a single key-authorization line via the `full` nom parser,
    /// discarding the leftover-input position and keeping only the result.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        full::key_authorization(s)
            .map(|(_, res)| res)
            .map_err(|e| match e {
                // Incomplete only arises for streaming parsers; this runs on
                // a complete &str, so the variant cannot occur.
                nom::Err::Incomplete(_) => unreachable!(),
                nom::Err::Error(err) | nom::Err::Failure(err) => err.0.to_string(),
            })
    }
}
impl FromStr for KeysFile {
    type Err = String;
    /// Parses an `authorized_keys` file line by line.
    ///
    /// Blank lines and lines whose first non-whitespace character is `#`
    /// are preserved verbatim as comments; every other line must parse as a
    /// `KeyAuthorization`. The error reports the (zero-based) line number.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Iterate lazily instead of collecting all lines up front.
        let mut lines: Vec<KeysFileLine> = Vec::new();
        for (line_no, line) in s.lines().enumerate() {
            // First non-whitespace character, if any (replaces the former
            // skip_while(..).next() chain).
            let first_char = line.chars().find(|c| !c.is_ascii_whitespace());
            let parsed = match first_char {
                // line was all whitespace, or first non-whitespace was comment char
                None | Some('#') => KeysFileLine::Comment(line.to_owned()),
                Some(_) => match line.parse() {
                    Ok(authorization) => KeysFileLine::Key(authorization),
                    Err(e) => return Err(format!("failed to parse line {}: {}", line_no, e)),
                },
            };
            lines.push(parsed);
        }
        Ok(Self { lines })
    }
}
| true |
616431ef8d48caa09e16a68413176f61b7a5b72d
|
Rust
|
t-veor/adventofcode
|
/2019/day04/day04.rs
|
UTF-8
| 1,484 | 3.25 | 3 |
[] |
no_license
|
#!/usr/bin/env rust-script
//! ```cargo
//! [package]
//! edition = "2021"
//! ```
use std::collections::HashMap;
/// True when the characters of `s` never decrease from left to right
/// (vacuously true for empty and single-character strings).
fn monotonic(s: &str) -> bool {
    s.chars().zip(s.chars().skip(1)).all(|(a, b)| a <= b)
}
// Yes, making a hashmap is overkill for this problem
/// Counts how many times each character occurs in `s`.
fn freqs(s: &str) -> HashMap<char, i32> {
    let mut counts = HashMap::new();
    for ch in s.chars() {
        *counts.entry(ch).or_insert(0) += 1;
    }
    counts
}
/// Part-1 rule: some character occurs at least twice (for the monotonic
/// strings this puzzle feeds in, that means an adjacent repeated pair).
fn hasConsecutive(s: &str) -> bool {
    freqs(s).values().any(|&count| count >= 2)
}
/// Part-2 rule: some character occurs exactly twice (a double that is not
/// part of a larger run, given monotonic input).
fn hasAdjacent(s: &str) -> bool {
    freqs(s).values().any(|&count| count == 2)
}
/// Part 1: count numbers in [start, end] whose digits never decrease and
/// that contain at least one repeated digit.
fn star1(start: i32, end: i32) -> usize {
    (start..=end)
        .map(|n| n.to_string())
        .filter(|p| monotonic(p) && hasConsecutive(p))
        .count()
}
/// Part 2: like part 1, but the repeated digit must form a group of
/// exactly two.
fn star2(start: i32, end: i32) -> usize {
    (start..=end)
        .map(|n| n.to_string())
        .filter(|p| monotonic(p) && hasAdjacent(p))
        .count()
}
fn main() {
    // The input file may be given as the first CLI argument; default to input.txt.
    let args: Vec<String> = std::env::args().collect();
    let filename = args.get(1).map(String::as_str).unwrap_or("input.txt");
    let contents = std::fs::read_to_string(filename).unwrap();
    // The puzzle input is a single "start-end" range on one line.
    let mut parts = contents.trim().split("-");
    let start = parts.next().unwrap().parse().unwrap();
    let end = parts.next().unwrap().parse().unwrap();
    println!("{}", star1(start, end));
    println!("{}", star2(start, end));
}
| true |
51e7a5d7ec724890ace1676d212f9cdf3586f82b
|
Rust
|
samsung-ads-grave-yard/trivial-crc32c
|
/src/lib.rs
|
UTF-8
| 2,012 | 2.953125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
//! Simplest possible interface to SSE4.2 CRC32C.
#![warn(
missing_copy_implementations,
missing_debug_implementations,
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
variant_size_differences,
)]
#![feature(asm)]
/// Computes CRC32C checksum of `bytes`.
///
/// Uses the SSE4.2 `crc32` instruction via (legacy, nightly-only) inline
/// asm: rdi = data pointer, rsi = length. The first loop folds len/8
/// qwords with `crc32q`; the second folds the remaining len%8 bytes with
/// `crc32b`. The CRC starts as all-ones (`orq $-1`) and is bit-inverted
/// (`not %eax`) at the end, as the CRC-32C definition requires.
#[cfg(any(target_arch = "x86_64"))]
pub fn crc32c(bytes: &[u8]) -> u32
{
    let csum: u32;
    // SAFETY: the asm only reads `bytes.len()` bytes starting at
    // `bytes.as_ptr()` and writes the result into `csum`; rcx/rdx are
    // declared as clobbers.
    unsafe {
        // This might not be the fastest, but it is nice and short.
        asm!("movq %rsi, %rcx
              xorl %edx, %edx
              orq $$-1, %rax
              shrq $$3, %rcx
          1:  cmpq %rcx, %rdx
              je 1f
              crc32q (%rdi,%rdx,8), %rax
              incq %rdx
              jmp 1b
          1:  leaq (%rdi,%rdx,8), %rcx
              andl $$7, %esi
              xorl %edx, %edx
          1:  cmpq %rdx, %rsi
              je 1f
              crc32b (%rcx,%rdx), %eax
              incq %rdx
              jmp 1b
          1:  not %eax"
             : "={eax}" (csum)
             : "{rdi}" (bytes.as_ptr()), "{rsi}" (bytes.len())
             : "~rcx", "~rdx"
             :);
    }
    csum
}
#[cfg(test)]
mod tests {
    use super::crc32c;
    // http://reveng.sourceforge.net/crc-catalogue/17plus.htm#crc.cat.crc-32c
    #[test]
    fn crc_catalog() {
        assert_eq!(0xe3069283, crc32c(b"123456789"))
    }
    // The following vectors come from RFC 3720 (iSCSI), §B.4; the test
    // names previously said "rfc3270", which is a different document.
    #[test]
    fn rfc3720_all_zeros() {
        assert_eq!(0x8a9136aa, crc32c(&vec![0; 32]))
    }
    #[test]
    fn rfc3720_all_ones() {
        assert_eq!(0x62a8ab43, crc32c(&vec![0xff; 32]))
    }
    #[test]
    fn rfc3720_increasing_values() {
        assert_eq!(0x46dd794e,
                   crc32c(&(0..32).collect::<Vec<u8>>().as_slice()));
    }
    #[test]
    fn rfc3720_decreasing_values() {
        assert_eq!(0x113fdb5c,
                   crc32c(&(0..32).rev().collect::<Vec<u8>>().as_slice()));
    }
}
| true |
b57aab33288216f869358bd2c50a7e0b7ab96416
|
Rust
|
JamesDeVore/PathfinderNPC
|
/pdf_reader/src/main.rs
|
UTF-8
| 622 | 2.515625 | 3 |
[] |
no_license
|
mod pdfhandling;
mod text_parser;
use pdfhandling::pdf;
use text_parser::return_feats;
fn main() {
    // The PDFs have already been converted; read the extracted text files
    // and pull out each character's list of feats.
    let files = pdf::find_all_files(String::from("./text")).unwrap();
    let all_feats = return_feats(files);
    println!("{:?}", all_feats);
}
| true |
e5ca6da615c36ede83b43ffefd3a8f3c3317a577
|
Rust
|
mikemorris/guessing_game
|
/src/main.rs
|
UTF-8
| 1,866 | 4.03125 | 4 |
[] |
no_license
|
use std::cmp::Ordering;
use std::io;
extern crate rand;
use rand::Rng;
/// A guess validated to lie in the inclusive range 1..=100.
pub struct Guess {
    value: i32,
}
impl Guess {
    /// Wraps `value`, panicking with a descriptive message when it is
    /// outside 1..=100.
    pub fn new(value: i32) -> Guess {
        // This feels weird because invalid input should be recoverable,
        // but the tests below rely on these exact panic messages.
        match value {
            v if v < 1 => panic!(
                "Guess value must be greater than or equal to 1, got {}.",
                v
            ),
            v if v > 100 => panic!(
                "Guess value must be less than or equal to 100, got {}.",
                v
            ),
            v => Guess { value: v },
        }
    }
    /// Returns the wrapped value.
    pub fn value(&self) -> i32 {
        self.value
    }
}
fn main() {
    let min = 1;
    let max = 101;
    // gen_range's upper bound is exclusive, so the secret is in [min, max-1].
    let secret_number = rand::thread_rng().gen_range(min, max);
    println!("Guess the number between {} and {}!", min, max - 1);
    loop {
        println!("Please input your guess.");
        let mut input = String::new();
        io::stdin()
            .read_line(&mut input)
            .expect("Failed to read line");
        // Non-numeric input just restarts the loop.
        let guess = match input.trim().parse() {
            Ok(num) => Guess::new(num),
            Err(_) => continue,
        };
        println!("You guessed: {}", guess.value);
        if guess.value < secret_number {
            println!("Too small!");
        } else if guess.value > secret_number {
            println!("Too big!");
        } else {
            println!("You win!");
            break;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Values above 100 must be rejected.
    #[test]
    #[should_panic(expected = "must be less than or equal")]
    fn greater_than_100() {
        Guess::new(200);
    }
    /// Values below 1 must be rejected. (Renamed from the misleading
    /// `greater_than_1`: the input exercised is -1, i.e. less than 1.)
    #[test]
    #[should_panic(expected = "must be greater than or equal")]
    fn less_than_1() {
        Guess::new(-1);
    }
}
| true |
bb9248e17cf8a5da01e5eee7b05c1729733178ed
|
Rust
|
pythonesque/rust-parsegen
|
/src/lib.rs
|
UTF-8
| 7,553 | 2.890625 | 3 |
[] |
no_license
|
#![allow(unstable)]
#![feature(box_syntax,unboxed_closures,unsafe_destructor,slicing_syntax)]
extern crate arena;
extern crate ascii;
extern crate libc;
//extern crate rustc;
//extern crate sync;
extern crate test;
use self::Factor::*;
//use rustc::util::nodemap::FnvHashMap;
use ascii::{Ascii, AsciiStr};
use std::fmt;
pub use parser::{Parser, ParserContext};
mod lalr;
mod scanner;
mod parser;
pub mod util;
#[derive(PartialEq)]
// The actual Exp structure.
// Note that it takes everything by reference, rather than owning it--this is mostly done just so
// we can allocate Ebnfs statically (since we don't have to call Vec). It does complicate the code
// a bit by requiring us to have a ParseContext that holds an arena where lists are actually
// allocated.
/*enum SExp<'a> {
    F64(f64), // Float literal: 0.5
    List(&'a [SExp<'a>]), // List of SExps: ( a b c)
    Str(&'a str), // Plain old string literal: "abc"
}*/
// Parsed EBNF grammar; all slices borrow from the parser's arena ('a).
pub struct Ebnf<'a> {
    title: Option<&'a [Ascii]>, // optional grammar title
    //productions: FnvHashMap<&'a [Ascii], Expr<'a>>,
    //productions: HashMap<&'a [Ascii], Expr<'a>, XXHasher>,
    //productions: Vec<(&'a [Ascii], Expr<'a>)>,//HashMap<&'a [Ascii], Expr<'a>, FnvHasherDefault>,
    n_terms: usize, // NOTE(review): presumably total term count — confirm in parser
    productions: Vec<(&'a [Ascii], Expr<'a>)>,//HashMap<&'a [Ascii], Expr<'a>, FnvHasherDefault>,
    terminals: Vec<&'a [Ascii]>, // terminal strings, indexed by Factor::Lit
    comment: Option<&'a [Ascii]>, // optional trailing comment
}
// Debug-style formatter (fmt::Show is the pre-1.0 name for Debug): dumps
// the title, the numbered terminal table, every production, and the comment.
impl<'a> fmt::Show for Ebnf<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        try!(write!(f, "Ebnf {{ title: {:?}, terminals: {{\n", self.title.as_ref().map( |s| s.as_str() )));
        for (index, &t) in self.terminals.iter().enumerate() {
            try!(write!(f, "#{} = \"{}\"\n", index, t.as_str().escape_default()));
        }
        try!(write!(f, "}}, productions: {{\n"));
        //for (&id, &e) in self.productions.iter() {
        for (index, &(id, e)) in self.productions.iter().enumerate() {
            try!(write!(f, "<{}> {}: ", index, id.as_str()));
            try!(show_expr(f, "", e, ".\n"));
        }
        write!(f, "}}, comment: {:?} }}", self.comment.as_ref().map( |s| s.as_str() ))
    }
}
// An expression is an alternation of terms; a term is a sequence of factors.
pub type Expr<'a> = &'a [Term<'a>];
pub type Term<'a> = &'a [Factor<'a>];
// One grammar factor. The usize payloads are indices into the Ebnf tables
// (productions/terminals) rather than nested expressions.
#[derive(Copy,PartialEq)]
pub enum Factor<'a> {
    Ref(usize),        // reference to production #n
    Lit(usize, usize), // terminal #n (second index: NOTE(review) — confirm meaning in parser)
    Opt(usize),        // optional production: [ <n> ]
    Rep(usize),        // repeated production: { <n> }
    Group(usize),      // grouped production: ( <n> )
}
// Formats an expression as "l term | term | ... r", where terms are
// space-separated factor lists.
fn show_expr(f: &mut fmt::Formatter, l: &str, e: Expr, r: &str) -> fmt::Result {
    // Formats one term: factors separated by single spaces.
    fn show_term(f: &mut fmt::Formatter, t: &Term) -> fmt::Result {
        let mut iter = t.iter();
        match iter.next() {
            Some(factor) => try!(write!(f, "{}", factor)),
            None => return Ok(())
        }
        for factor in iter {
            try!(write!(f, " {}", factor));
        }
        Ok(())
    }
    let mut iter = e.iter();
    try!(write!(f, "{}", l));
    match iter.next() {
        Some(term) => try!(show_term(f, term)),
        None => return Ok(())
    }
    for term in iter {
        try!(write!(f, " | "));
        try!(show_term(f, term))
    }
    write!(f, "{}", r)
}
// Display-style formatter (fmt::String is the pre-1.0 name for Display):
// renders each factor with its index and the EBNF delimiters.
impl<'a> fmt::String for Factor<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Ref(e) => write!(f, "<{}>", e),
            Lit(s, _) => write!(f, "#{}", s),
            Opt(e) => write!(f, "[ <{}> ]", e),//show_expr(f, "[ ", e, " ]"),
            Rep(e) => write!(f, "{{ <{}> }}", e),//show_expr(f, "{ ", e, " }"),
            Group(e) => write!(f, "( <{}> )", e), //show_expr(f, "( ", e, " )"),
        }
    }
}
// All parse failures the EBNF parser can report; each variant's meaning is
// documented inline. (Show is the pre-1.0 Debug derive.)
#[derive(Copy,PartialEq,Show)]
pub enum Error {
    UnterminatedStringLiteral, // Missing an end double quote during string parsing
    ExpectedLBrace, // Expected a '{' token
    ExpectedEquals, // Expected an '=' token at the start of a production
    //UnexpectedEOF, // Usually means a missing ), but could also mean there were no tokens at all.
    ExpectedEOF, // More tokens after the list is finished, or after a literal if there is no list.
    DuplicateProduction, // Expected only one variant of the given production
    ExpectedProduction, // Expected an identifier or a '}' token (production start)
    MissingProduction, // EBNF referenced an invalid production
    ExpectedFactor, // Expected a factor, found something else
    ExpectedFactorOrEnd, // Expected a factor or end delimiter of some sort, found something else
}
#[cfg(test)]
mod tests {
    use ascii::{Ascii, AsciiCast};
    use test::Bencher;
    use parser::{Parser, ParserContext};
    // Thin wrapper pinning the lifetimes of parser, arena context and input.
    fn try_decode<'a, 'b, 'c>(parser: &'c mut Parser<'b>,
                              ctx: &'a ParserContext<'a>,
                              string: &'a [Ascii]) -> Result<::Ebnf<'a>, ::Error> {
        parser.parse(ctx, string)
    }
    // Grammar fixtures compiled into the test binary.
    const EBNF_EBNF_STRING: &'static [u8] = include_bytes!("resources/ebnf.ebnf");
    const ONE_LINE_EBNF_STRING: &'static [u8] = include_bytes!("resources/one_line.ebnf");
    const ASN1_EBNF_STRING: &'static [u8] = include_bytes!("resources/asn1.ebnf");
    const PAREN_EXPR: &'static [u8] = include_bytes!("resources/paren_expr.ebnf");
    // Benchmarks parsing the ASN.1 grammar; the commented-out variants are
    // earlier experiments with fixture choice and arena sizing.
    #[bench]
    fn bench_decode(b: &mut Bencher)
    {
        //let string = EBNF_EBNF_STRING.to_ascii();
        let string = //EBNF_EBNF_STRING
            ASN1_EBNF_STRING
            //PAREN_EXPR
            //ONE_LINE_EBNF_STRING
            .to_ascii()
            .unwrap();
        /*let ref mut parser = Parser::with_capacity(1024);
        let ref ctx = ParserContext::new(8192);*/
        /*static mut static_parser: *mut Parser<'static> = 0 as *mut _;//unsafe { std::mem::uninitialized(); }
        static START: Once = ONCE_INIT;
        START.doit(|| {
            unsafe {
                static_parser = ::std::mem::transmute(box Parser::with_capacity(1024).unwrap());
            }
        });
        let parser = unsafe { &mut *static_parser };*/
        let ref mut parser = Parser::with_capacity(1024).unwrap();
        //let ref mut parser = Parser::new().unwrap();
        //let ref ctx = ParserContext::new(0x1000);
        b.iter(|| {
            let ref ctx = ParserContext::new(0x100);
            //let ref ctx = ParserContext::new(80);
            //let ref ctx = ParserContext::new(8);
            //let ref ctx = ParserContext::new(32);
            //for _ in range(0, 10i8) {
            try_decode(parser, ctx, string).unwrap();
            //}
        });
    }
    // Smoke test: the ASN.1 grammar must parse (panics on error).
    #[test]
    fn it_works() {
        //let mut ctx = ParserContext::new(); // Can put this either here...
        let string = //EBNF_EBNF_STRING
            ASN1_EBNF_STRING
            //ONE_LINE_EBNF_STRING
            .to_ascii()
            .unwrap();
        //let mut parser = Parser::new().unwrap();
        let mut parser = Parser::with_capacity(1024).unwrap();
        for _ in (0 .. 1000u16) {
            //let ctx = ParserContext::new(8); // or here...
            let ctx = ParserContext::new(8192);
            let /*foo*/_ = match try_decode(&mut parser, &ctx, string) {
                Ok(c) => {println!("{:?}", c); c }
                Err(e) => //{println!("{}", e); break },
                    panic!("{:?}", e),
            };
            /*for _ in range(0u16, 1000) {
                let ctx = ParserContext::new(); // or here...
                let _ = try_decode(&mut parser, &ctx, string);
            }*/
            break;
            //arena = ParserContext::new(); // or here...
            //println!("{}", foo);
            //break;
        }
    }
}
| true |
5f3fc78332bdb79a9c8bd520b8ee55831ec6b689
|
Rust
|
Meisterlala/Online-Computer-Science-degree
|
/Algorithms Specialization/Part 1/quicksort/src/main.rs
|
UTF-8
| 1,399 | 3.203125 | 3 |
[] |
no_license
|
#![feature(test)]
use rand::prelude::*;
use std::{mem::swap, time::Instant};
use quicksort::quick_sort;
/// Sorts a shuffled permutation with the crate's quick_sort, verifies the
/// result, and compares timing against the standard library sorts.
fn main() {
    let array_size = 1000000;
    println!("Generating array of size {}", array_size);
    let mut input = generate_array(array_size);
    let mut input_copy = input.clone();
    let mut input_copy2 = input.clone();
    println!("Sorting... ");
    // Sort
    let start_time = Instant::now();
    quick_sort(&mut input);
    let elapsed = start_time.elapsed().as_millis();
    // Count Amount of Errors
    print!("Checking result... ");
    // A slice is sorted iff every adjacent pair is ordered. windows(2)
    // counts each inversion exactly once; the previous windows(3) check
    // double-counted inversions and saw no windows at all for slices
    // shorter than three elements.
    let errors = input
        .windows(2)
        .filter(|w| w[0] > w[1])
        .count();
    match errors {
        e if e > 0 => println!("ERROR {} mismatches", e),
        _ => println!("OK"),
    }
    println!("");
    println!("My sorting took {}ms", elapsed);
    // Time default Sort
    let start_time = Instant::now();
    input_copy.sort();
    let elapsed = start_time.elapsed().as_millis();
    println!("Default sorting took {}ms", elapsed);
    // Time default Sort
    let start_time = Instant::now();
    input_copy2.sort_unstable();
    let elapsed = start_time.elapsed().as_millis();
    println!("Unstable sorting took {}ms", elapsed);
}
/// Produces the integers 1..=length in random order.
fn generate_array(length: isize) -> Vec<isize> {
    let mut values: Vec<isize> = (1..=length).collect();
    let mut rng = rand::thread_rng();
    values.shuffle(&mut rng);
    values
}
| true |
21d4c9ef2b896a45e064bef532510585fb801c40
|
Rust
|
maxsmoke/issuetracker_backend
|
/src/db/models.rs
|
UTF-8
| 4,060 | 2.734375 | 3 |
[] |
no_license
|
use super::schema::{issue, project};
use diesel;
use diesel::{prelude::*, sqlite::SqliteConnection, RunQueryDsl};
use crate::db::schema::issue::dsl::{
issue as all_issues, project_id,
};
use crate::db::schema::project::dsl::{
issue_count, project as all_projects,
};
// use crate::rocket::data;
// use std::io::Read;
// use rocket::{Request, Data, Outcome, Outcome::*};
// use rocket::data::{self, FromDataSimple};
// use rocket::http::{Status, ContentType};
// Always use a limit to prevent DoS attacks.
// const LIMIT: u64 = 256;
// use super::establish_connection;
use super::schema::{ issue as issues, project as projects };
// Insertable form of an issue row: everything except the auto-assigned id.
#[derive(Insertable, Deserialize)]
#[table_name = "issue"]
pub struct NewIssue {
    pub title: String,
    pub project_id: i32, // owning project's id
    pub complete: i32,   // presumably a 0/1 boolean flag (SQLite) — confirm
    pub content: String,
}
impl NewIssue{
    /// Inserts a new issue row; panics on database error.
    pub fn insert(issue: NewIssue, conn: &SqliteConnection) {
        diesel::insert_into(issue::table)
            .values(issue)
            .execute(conn)
            .expect("Error inserting new issue");
    }
}
// Full issue row: Queryable for reads, AsChangeset for updates, serde for
// the JSON API.
#[derive(AsChangeset, Queryable, Deserialize, Serialize)]
#[table_name="issues"]
pub struct Issue {
    pub id: i32,
    pub title: String,
    pub project_id: i32, // owning project's id
    pub complete: i32,   // presumably a 0/1 boolean flag (SQLite) — confirm
    pub content: String,
}
impl Issue {
    /// Returns every issue in the table; panics on database error.
    pub fn all(conn: &SqliteConnection) -> Vec<Issue>{
        issue::table
            .load::<Issue>(conn)
            .expect("Error loading Issues")
    }
    /// Fetches a single issue by primary key; panics when absent.
    pub fn get(id: i32, conn: &SqliteConnection) -> Issue {
        issue::table.find(id).get_result::<Issue>(conn).expect("Error: Failed Project query")
    }
    /// Overwrites the issue row identified by `id` with `issue`.
    pub fn update(id: i32, issue: Issue, conn: &SqliteConnection){
        // The Result was previously dropped silently; surface DB errors the
        // same way the insert path does.
        diesel::update(issues::table.find(id))
            .set(&issue)
            .execute(conn)
            .expect("Error updating issue");
    }
    /// Deletes the issue row identified by `id`.
    pub fn delete(id: i32, conn: &SqliteConnection){
        diesel::delete(issues::table.find(id))
            .execute(conn)
            .expect("Error deleting issue");
    }
}
// Insertable form of a project row: everything except the auto-assigned id.
#[derive(Insertable, Deserialize)]
#[table_name = "project"]
pub struct NewProject {
    pub title: String,
    pub complete: i32,    // presumably a 0/1 boolean flag (SQLite) — confirm
    pub issue_count: i32, // cached count, kept in sync by Project::all
}
impl NewProject{
    /// Inserts a new project row; panics on database error.
    pub fn insert(project: NewProject, conn: &SqliteConnection){
        diesel::insert_into(project::table)
            .values(project)
            .execute(conn)
            .expect("Error inserting new project");
    }
}
// Full project row: Queryable for reads, AsChangeset for updates, serde for
// the JSON API.
#[derive(Queryable, Deserialize, Serialize, AsChangeset)]
#[table_name="projects"]
pub struct Project {
    pub id: i32,
    pub title: String,
    pub complete: i32,    // presumably a 0/1 boolean flag (SQLite) — confirm
    pub issue_count: i32, // cached count, refreshed by Project::all
}
impl Project{
    /// Returns every project. Side effect: first recomputes each project's
    /// `issue_count` from the issue table and persists any corrections,
    /// then reloads and returns the refreshed rows.
    pub fn all(conn: &SqliteConnection) -> Vec<Project>{
        let projects = project::table
            .load::<Project>(conn)
            .expect("Error loading Projects"); 
        //update issue count
        for project in projects {
            // Actual number of issues referencing this project.
            let new_count = all_issues
                .filter(project_id.eq(project.id))
                .count()
                .get_result::<i64>(conn)
                .unwrap() as i32;
            if project.issue_count != new_count {
                let update_project = diesel::update(all_projects.find(project.id));
                let result = update_project.set(issue_count.eq(new_count)).execute(conn);
                match result {
                    // `e` is the number of rows affected.
                    Ok(e) => println!("{}", e),
                    Err(_e) => panic!("update issue_count Failed"),
                };
            }
        }
        // Second load picks up the corrected counts.
        project::table
            .load::<Project>(conn)
            .expect("Error loading Projects")
    }
    /// Fetches a single project by primary key; panics when absent.
    /// NOTE(review): parameter order is (conn, id), the reverse of
    /// Issue::get — kept for caller compatibility.
    pub fn get(conn: &SqliteConnection, id: i32) -> Project {
        project::table.find(id).get_result::<Project>(conn).expect("Error: Failed Project query")
    }
    /// Overwrites the project row `id`; returns a status string (error
    /// details are discarded).
    pub fn update(id: i32, project: Project, conn: &SqliteConnection) -> &str{
        match diesel::update(projects::table.find(id))
            .set(&project)
            .execute(conn){
                Ok(_e) => "Updated",
                Err(_e) => "Failed to Update",
            }
    }
    /// Deletes the project row `id`; returns a status string (error
    /// details are discarded).
    pub fn delete(id: i32, conn: &SqliteConnection) -> &str{
        match diesel::delete(projects::table.find(id)).execute(conn){
            Ok(_e) => "Delete Successful",
            Err(_e) => "Delete Failed",
        }
    }
}
| true |
9098549b5edd3ca23b4e6f168cb343f9051097a9
|
Rust
|
bobfang1992/rsqlite
|
/src/varint.rs
|
UTF-8
| 4,293 | 3.484375 | 3 |
[] |
no_license
|
/// Decodes a SQLite-style varint from the front of `bytes`.
///
/// Returns the decoded value and how many bytes were consumed (at most 9).
/// Each of the first eight bytes contributes its low 7 bits, with the high
/// bit acting as a continuation flag; a ninth byte, when reached,
/// contributes all 8 bits.
pub fn read_varint(bytes: &[u8]) -> (i64, usize) {
    let mut value: i64 = 0;
    let mut consumed: usize = 0;
    for i in 0..bytes.len().min(9) {
        let byte = bytes[i];
        consumed += 1;
        if i == 8 {
            // Ninth byte: the whole byte is payload.
            value = (value << 8) | byte as i64;
            break;
        }
        value = (value << 7) | (byte & 0b0111_1111) as i64;
        if byte < 0b1000_0000 {
            // Continuation bit clear: this was the final byte.
            break;
        }
    }
    (value, consumed)
}
/// Returns how many bytes the varint at the front of `bytes` occupies,
/// without decoding it. A varint is capped at 9 bytes, so 9 is returned
/// when no terminating byte (high bit clear) appears among the first nine.
pub fn read_varint_byte_length(bytes: &[u8]) -> usize {
    bytes
        .iter()
        .take(9)
        .position(|&byte| byte & 0b1000_0000 == 0)
        .map_or(9, |pos| pos + 1)
}
/// Encodes `input` as a SQLite-style varint (1..=9 bytes).
///
/// Up to eight bytes hold 7 payload bits each with the high bit marking
/// continuation; values needing more than 56 bits (including all negative
/// values) use a ninth, final byte holding a full 8 bits.
pub fn serialize_to_varint(input: i64) -> Vec<u8> {
    // Collect payload groups least-significant first, then reverse.
    let mut groups: Vec<u8> = Vec::with_capacity(9);
    let mut remaining = input;
    if input as u64 > 0x00ff_ffff_ffff_ffff {
        // Nine-byte encoding: the final byte carries a full 8 bits.
        groups.push((remaining & 0b1111_1111) as u8);
        remaining >>= 8;
    }
    for _ in 0..8 {
        groups.push((remaining & 0b0111_1111) as u8);
        remaining >>= 7;
        if remaining == 0 {
            // Reached the last group before using all nine bytes.
            break;
        }
    }
    groups.reverse();
    // Every byte except the last signals continuation via the high bit.
    let last = groups.len() - 1;
    for byte in &mut groups[..last] {
        *byte |= 0b1000_0000;
    }
    groups
}
// Unit tests covering decode, length probing, encode, and round-trips.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn read_single_byte_varint() {
        assert_eq!((1, 1), read_varint(&vec![0b00000001]));
        assert_eq!((3, 1), read_varint(&vec![0b00000011]));
        assert_eq!((7, 1), read_varint(&vec![0b00000111]));
        assert_eq!((15, 1), read_varint(&vec![0b00001111]));
    }
    #[test]
    fn read_two_byte_varint() {
        assert_eq!((128, 2), read_varint(&vec![0b10000001, 0b00000000]));
        assert_eq!((129, 2), read_varint(&vec![0b10000001, 0b00000001]));
        assert_eq!((255, 2), read_varint(&vec![0b10000001, 0b01111111]));
    }
    #[test]
    fn read_nine_byte_varint() {
        // All-ones nine-byte encoding is -1.
        assert_eq!((-1, 9), read_varint(&vec![0xff; 9]));
    }
    #[test]
    fn read_varint_in_longer_bytes() {
        // Trailing bytes past the varint must be ignored.
        assert_eq!((1, 1), read_varint(&vec![0x01; 10]));
        assert_eq!((-1, 9), read_varint(&vec![0xff; 10]));
    }
    #[test]
    fn serialize_simple_varints() {
        assert_eq!(vec![0b00000001], serialize_to_varint(1));
        assert_eq!(vec![0b00000011], serialize_to_varint(3));
    }
    #[test]
    fn serialize_medium_length_varints() {
        assert_eq!(
            vec![0b10000010, 0b00000001],
            serialize_to_varint(0b100000001)
        )
    }
    #[test]
    fn serialize_negative_varints() {
        assert_eq!(vec![0xff; 9], serialize_to_varint(-1));
    }
    #[test]
    fn read_varint_lengths() {
        let bytes_vec: Vec<Vec<u8>> = vec![
            vec![0x0f],
            vec![0xff, 0x0f],
            vec![0xff, 0xff, 0x0f],
            vec![0xff, 0xff, 0xff, 0x0f],
            vec![0xff, 0xff, 0xff, 0xff, 0x0f],
            vec![0xff, 0xff, 0xff, 0xff, 0xff, 0x0f],
            vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f],
            vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f],
            vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f],
            // Next ones are exceeding the max length of a varint
            vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0x0f],
            vec![
                0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0x0f, 0x0f,
            ],
        ];
        let expected_lengths = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9];
        for i in 0..bytes_vec.len() {
            assert_eq!(expected_lengths[i], read_varint_byte_length(&bytes_vec[i]));
        }
    }
    #[test]
    fn test_noop() {
        // doing serialize and deserialize should give the input back.
        let inputs: Vec<i64> = vec![
            0x01,
            0x1ff,
            0x123456,
            0x11223344,
            0x1122334455,
            0x112233445566,
            0x11223344556677,
            0x1928374655647382,
        ];
        for input in inputs {
            assert_eq!(input, read_varint(&serialize_to_varint(input)).0);
        }
    }
}
| true |
ad44c5f4e25c17a42fb086725565f06c40343d4f
|
Rust
|
benzyx/Elric
|
/src/protocol.rs
|
UTF-8
| 1,315 | 2.734375 | 3 |
[] |
no_license
|
use crate::utils::*;
use tokio::prelude::*;
const N_LENGTH_BYTES: usize = 4;
const LENGTH_LIMIT: usize = 100_000;
/// Reads one length-prefixed message from `stream` and deserializes it
/// with bincode.
///
/// Frame layout: a 4-byte length (bincode-encoded u32) followed by
/// `length` payload bytes. Lengths above `LENGTH_LIMIT` are rejected
/// before any payload is read. A zero length yields an empty payload.
// NOTE(review): the length read below lacks the `.ann_err(..)` context the
// other reads carry — confirm whether that is intentional.
pub async fn framed_read<T: serde::de::DeserializeOwned>(stream: &mut (impl AsyncRead + Unpin)) -> Result<T> {
    let mut length_bytes = [0; N_LENGTH_BYTES];
    stream.read_exact(&mut length_bytes).await?;
    let length: u32 = bincode::deserialize(&length_bytes)?;
    let length = length as usize;
    if length > LENGTH_LIMIT {
        return Err(format!("reading: messages cannot be longer than {} bytes", LENGTH_LIMIT).into());
    }
    let mut buffer = vec![0; length];
    if length > 0 {
        stream.read_exact(&mut buffer).await.ann_err("reading message")?;
    }
    Ok(bincode::deserialize(&buffer).ann_err("deserializing message")?)
}
/// Serializes `msg` with bincode and writes it to `stream` as a
/// length-prefixed frame (the inverse of `framed_read`).
///
/// The size check happens before any bytes are written, so an oversized
/// message leaves the stream untouched. Empty messages write only the
/// 4-byte length header.
pub async fn framed_write<T: serde::Serialize>(stream: &mut (impl AsyncWrite + Unpin), msg: &T) -> R {
    let msg = bincode::serialize(msg).ann_err("serializing message")?;
    if msg.len() > LENGTH_LIMIT {
        return Err(format!("writing: messages cannot be longer than {} bytes", LENGTH_LIMIT).into());
    }
    let length: u32 = msg.len() as u32;
    stream
        .write_all(&bincode::serialize(&length).ann_err("serializing message")?)
        .await
        .ann_err("writing length")?;
    if length > 0 {
        stream.write_all(&msg).await.ann_err("writing message")?;
    }
    Ok(())
}
| true |
8d206ba811ae32c03b6dee3a21aa34cd53e7c25f
|
Rust
|
informationsea/bgzip-rs
|
/bgzip/src/csi.rs
|
UTF-8
| 1,210 | 2.984375 | 3 |
[
"MIT"
] |
permissive
|
use std::convert::TryInto;
/// calculate bin given an alignment covering [beg,end) (zero-based, half-close-half-open)
pub fn reg2bin(beg: i64, end: i64, min_shift: u32, depth: u32) -> u32 {
    // Work with the inclusive last base of the half-open interval.
    let last = end - 1;
    let mut shift = min_shift;
    // Offset of the first bin on the deepest level: (8^depth - 1) / 7.
    let mut level_offset = ((1 << (depth * 3)) - 1) / 7;
    // Walk from the finest level toward the root; the first level where
    // both ends fall in the same window gives the bin number.
    for level in (1..=depth).rev() {
        if beg >> shift == last >> shift {
            return (level_offset + (beg >> shift)).try_into().unwrap();
        }
        shift += 3;
        level_offset -= 1 << ((level - 1) * 3);
    }
    // Region spans multiple bins at every level: use the root bin.
    0
}
/// calculate the list of bins that may overlap with region [beg,end) (zero-based)
pub fn reg2bins(beg: i64, end: i64, min_shift: u32, depth: u32) -> Vec<u32> {
    let last = end - 1;
    let mut result: Vec<u32> = Vec::new();
    // Start at the root level (coarsest window) and descend.
    let mut shift = min_shift + depth * 3;
    let mut level_offset: i64 = 0;
    for level in 0..=depth {
        let first_bin = level_offset + (beg >> shift);
        let last_bin = level_offset + (last >> shift);
        for bin in first_bin..=last_bin {
            result.push(bin.try_into().unwrap());
        }
        shift -= 3;
        level_offset += 1 << (level * 3);
    }
    result
}
| true |
a9a232ae845fe2cd83d520c32c0146d0703535f3
|
Rust
|
ekhall/computationbook-rust
|
/src/the_simplest_computers/finite_automata/nfa.rs
|
UTF-8
| 1,042 | 3 | 3 |
[] |
no_license
|
use std::collections::HashSet;
use std::hash::Hash;
use super::nfarulebook::{NFARulebook};
/// A nondeterministic finite automaton tracking the set of states it
/// could currently be in.
#[derive(Clone)]
pub struct NFA<T> {
    current_state: HashSet<T>,      // possible current states (before free-move closure)
    pub accept_states: HashSet<T>,  // states in which the NFA accepts
    pub rulebook: NFARulebook<T>,   // transition rules, including free moves
}
impl<T: Eq + Clone + Hash> NFA<T> {
    /// Builds an NFA by cloning the given state sets and rulebook.
    pub fn new(current_state: &HashSet<T>, accept_states: &HashSet<T>, rulebook: &NFARulebook<T>) -> Self {
        NFA{
            current_state: current_state.clone(),
            accept_states: accept_states.clone(),
            rulebook: rulebook.clone()}
    }
    /// The set of possible current states, expanded through the
    /// rulebook's free (epsilon) moves.
    pub fn current_state(&self) -> HashSet<T> {
        self.rulebook.follow_free_moves(&self.current_state)
    }
    /// True when at least one possible current state is an accept state.
    pub fn accepting(&self) -> bool {
        !self.current_state().is_disjoint(&self.accept_states)
    }
    /// Advances the state set by consuming a single input character.
    pub fn read_character(&mut self, character: char) {
        self.current_state = self.rulebook.next_states(&self.current_state(), character);
    }
    /// Consumes `s` one character at a time.
    pub fn read_string(&mut self, s: &str) {
        for c in s.chars() {
            self.read_character(c);
        }
    }
}
| true |
62964eda8cdec44ea536adbe347f9ca54a4d9c0b
|
Rust
|
m-lima/passifier
|
/src/ops.rs
|
UTF-8
| 15,490 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
/// Validation hook for store paths before they are traversed.
trait PathValidator {
    /// Returns `self` unchanged when the path is usable, an error otherwise.
    fn valid(&self) -> anyhow::Result<&Self>;
}
impl PathValidator for &[String] {
    /// A path is valid exactly when it has at least one segment.
    fn valid(&self) -> anyhow::Result<&Self> {
        if !self.is_empty() {
            Ok(self)
        } else {
            Err(anyhow::anyhow!("Path is empty"))
        }
    }
}
/// Inserts `entry` at `path`, creating intermediate nested stores as
/// needed. Fails on an empty path, on an empty nested entry, or when
/// `store::Store::create` rejects the final insertion (e.g. a conflict).
pub fn create(root: &mut store::Store, path: &[String], entry: store::Entry) -> anyhow::Result<()> {
    // Wraps `entry` in one nested store per remaining path segment,
    // innermost segment first.
    fn to_entry(path: &[String], entry: store::Entry) -> store::Entry {
        if path.is_empty() {
            return entry;
        }
        path.iter().rev().fold(entry, |acc, curr| {
            let mut store = store::Store::new();
            store.create(String::from(curr), acc).unwrap();
            store::Entry::Nested(store)
        })
    }
    // Descends through already-existing nested stores, returning the
    // deepest reachable store and the path segments still to create.
    fn create_inner<'r, 'p>(
        root: &'r mut store::Store,
        path: &'p [String],
    ) -> (&'r mut store::Store, &'p [String]) {
        if path.len() == 1 {
            return (root, path);
        }
        if let Some(store::Entry::Nested(_)) = root.read(&path[0]) {
            // Re-borrow mutably; the immutable probe above guarantees the
            // entry exists and is nested.
            if let Some(store::Entry::Nested(inner)) = root.get(&path[0]) {
                return create_inner(inner, &path[1..]);
            } else {
                unreachable!();
            }
        }
        (root, path)
    }
    // Refuse to create an empty nested store.
    if let store::Entry::Nested(ref store) = entry {
        if store.secrets().next().is_none() {
            anyhow::bail!("Nothing to add");
        }
    }
    let (root, rest) = create_inner(root, path.valid()?);
    let entry = to_entry(&rest[1..], entry);
    root.create(rest[0].to_owned(), entry)?;
    Ok(())
}
/// Looks up the entry at `path`, descending through nested stores one
/// segment at a time. Fails on an empty path or when any segment is
/// missing or not nested where more segments remain.
pub fn read<'a>(root: &'a store::Store, path: &[String]) -> anyhow::Result<&'a store::Entry> {
    fn walk<'a>(store: &'a store::Store, segments: &[String]) -> Option<&'a store::Entry> {
        let entry = store.read(&segments[0])?;
        if segments.len() == 1 {
            return Some(entry);
        }
        match entry {
            store::Entry::Nested(inner) => walk(inner, &segments[1..]),
            _ => None,
        }
    }
    walk(root, path.valid()?).ok_or_else(|| anyhow::anyhow!("Not found"))
}
/// Replaces the entry at `path` with `entry`. Replacing with an empty
/// nested store is treated as a deletion. Fails when the path is empty
/// or does not lead to an existing entry.
pub fn update(root: &mut store::Store, path: &[String], entry: store::Entry) -> anyhow::Result<()> {
    // Descends through existing nested stores to the parent of the final
    // segment; errors if an intermediate segment is missing or not nested.
    fn update_inner<'r, 'p>(
        root: &'r mut store::Store,
        path: &'p [String],
    ) -> anyhow::Result<(&'r mut store::Store, &'p String)> {
        if path.len() == 1 {
            Ok((root, &path[0]))
        } else if let Some(store::Entry::Nested(_)) = root.read(&path[0]) {
            // Re-borrow mutably; the probe above guarantees a nested entry.
            if let Some(store::Entry::Nested(inner)) = root.get(&path[0]) {
                update_inner(inner, &path[1..])
            } else {
                unreachable!();
            }
        } else {
            Err(anyhow::anyhow!("Not found"))
        }
    }
    // An empty nested replacement is equivalent to removing the entry.
    if let store::Entry::Nested(ref inner) = entry {
        if inner.secrets().next().is_none() {
            return delete(root, path);
        }
    }
    let (root, path) = update_inner(root, path.valid()?)?;
    root.update(path.clone(), entry)?;
    Ok(())
}
/// Removes the entry at `path`, then prunes any nested stores left empty
/// by the removal (all the way up the path). Fails on an empty path, a
/// non-nested intermediate segment, or a missing entry.
pub fn delete(root: &mut store::Store, path: &[String]) -> anyhow::Result<()> {
    // Returns Ok(true) when `root` itself became empty after the delete,
    // signalling the caller to remove it as well.
    fn delete_inner(root: &mut store::Store, path: &[String]) -> anyhow::Result<bool> {
        fn delete_nested(root: &mut store::Store, path: &[String]) -> anyhow::Result<bool> {
            match root.get(&path[0]) {
                Some(store::Entry::Nested(inner)) => delete_inner(inner, &path[1..]),
                Some(_) => anyhow::bail!("Invalid path"),
                None => anyhow::bail!("Not found"),
            }
        }
        // Delete the child either because it's the target segment or
        // because the recursive call emptied it.
        if path.len() == 1 || delete_nested(root, path)? {
            root.delete(&path[0])?;
            Ok(root.secrets().next().is_none())
        } else {
            Ok(false)
        }
    }
    delete_inner(root, path.valid()?).map(|_| ())
}
#[cfg(test)]
mod tests {
static MAP: &str = r#"{
"binary": [ 245, 107, 95, 100 ],
"nested": {
"inner": {
"deep": {
"foo": "bar"
}
},
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#;
macro_rules! own {
($string:literal) => {
String::from($string)
};
(e $string:literal) => {
store::Entry::String(String::from($string))
};
}
macro_rules! path {
($($string:literal),*) => {
&[$(own!($string)),*]
};
}
macro_rules! parse {
($string:expr) => {
serde_json::from_str::<store::Store>($string)
.unwrap()
.into()
};
(e $string:literal) => {
store::Entry::Nested(parse!($string))
};
}
macro_rules! update_empty {
($path:expr) => {{
let mut updated = make_store();
let mut deleted = make_store();
super::update(&mut updated, $path, parse!(e "{}")).unwrap();
super::delete(&mut deleted, $path).unwrap();
assert_eq!(updated, deleted);
}};
}
macro_rules! delete {
($path:expr, $expected:literal) => {{
let mut store = make_store();
super::delete(&mut store, $path).unwrap();
assert_eq!(store, parse!($expected));
}};
}
fn make_store() -> store::Store {
parse!(MAP)
}
#[test]
fn create() {
use super::create;
let mut store = store::Store::new();
create(&mut store, path!["new"], own!(e "new_value")).unwrap();
assert_eq!(store, parse!(r#"{"new":"new_value"}"#));
create(&mut store, path!["foo"], own!(e "new_value")).unwrap();
assert_eq!(store, parse!(r#"{"new":"new_value","foo":"new_value"}"#));
create(&mut store, path!["nested", "inner", "foo"], own!(e "bar")).unwrap();
assert_eq!(
store,
parse!(r#"{"new":"new_value","foo":"new_value","nested":{"inner":{"foo":"bar"}}}"#)
);
create(
&mut store,
path!["nested", "other", "foo", "deep", "deeper"],
own!(e "here"),
)
.unwrap();
assert_eq!(
store,
parse!(
r#"{"new":"new_value","foo":"new_value","nested":{"inner":{"foo":"bar"},"other":{"foo":{"deep":{"deeper":"here"}}}}}"#
)
);
}
#[test]
fn create_conflict() {
use super::create;
let mut store = make_store();
assert!(create(&mut store, path!["binary"], own!(e "new_value")).is_err());
assert!(create(&mut store, path!["nested"], own!(e "new_value")).is_err());
assert!(create(&mut store, path!["nested", "sibling"], own!(e "new_value")).is_err());
assert!(create(
&mut store,
path!["nested", "sibling", "deep"],
own!(e "new_value")
)
.is_err());
}
#[test]
fn create_empty() {
use super::create;
let mut store = make_store();
assert!(create(&mut store, path!["nested"], parse!(e "{}")).is_err());
}
#[test]
fn read() {
use super::read;
let store = make_store();
assert_eq!(
read(&store, path!["binary"]).unwrap(),
&store::Entry::Binary(vec![245, 107, 95, 100])
);
assert_eq!(
read(&store, path!["nested"]).unwrap(),
store.read("nested").unwrap()
);
assert_eq!(
read(&store, path!["nested", "inner"]).unwrap(),
&parse!(e r#"{"deep":{"foo":"bar"}}"#)
);
assert_eq!(
read(&store, path!["nested", "inner", "deep"]).unwrap(),
&parse!(e r#"{"foo":"bar"}"#)
);
assert_eq!(
read(&store, path!["nested", "inner", "deep", "foo"]).unwrap(),
&own!(e "bar")
);
assert_eq!(
read(&store, path!["nested", "sibling"]).unwrap(),
&own!(e "inner_sibling")
);
assert_eq!(
read(&store, path!["binary"]).unwrap(),
&store::Entry::Binary(vec![245, 107, 95, 100])
);
assert_eq!(
read(&store, path!["sibling"]).unwrap(),
&own!(e "outer_sibling")
);
}
#[test]
fn read_not_found() {
use super::read;
let store = make_store();
assert!(read(&store, path!["bla"]).is_err());
assert!(read(&store, path!["binary", "245"]).is_err());
assert!(read(&store, path!["nested", "bla"]).is_err());
assert!(read(&store, path!["nested", "bla", "foo"]).is_err());
assert!(read(&store, path!["nested", "inner", "bla"]).is_err());
assert!(read(&store, path!["nested", "inner", "bla", "deep"]).is_err());
assert!(read(&store, path!["nested", "inner", "deep", "bla"]).is_err());
assert!(read(&store, path!["nested", "inner", "deep", "foo", "bla"]).is_err());
assert!(read(&store, path![""]).is_err());
}
#[test]
fn update() {
use super::update;
let mut store = make_store();
// update top level
update(&mut store, path!["binary"], own!(e "new")).unwrap();
assert_eq!(
store,
parse!(
r#"{
"binary": "new",
"nested": {
"inner": {
"deep": {
"foo": "bar"
}
},
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#
)
);
// update deep
update(
&mut store,
path!["nested", "inner", "deep", "foo"],
own!(e "new"),
)
.unwrap();
assert_eq!(
store,
parse!(
r#"{
"binary": "new",
"nested": {
"inner": {
"deep": {
"foo": "new"
}
},
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#
)
);
// update root of deep tree
update(&mut store, path!["nested"], own!(e "new")).unwrap();
assert_eq!(
store,
parse!(
r#"{
"binary": "new",
"nested": "new",
"sibling": "outer_sibling"
}"#
)
);
}
#[test]
fn update_empty_just_deletes() {
update_empty!(path!["binary"]);
update_empty!(path!["sibling"]);
update_empty!(path!["nested"]);
update_empty!(path!["nested", "sibling"]);
update_empty!(path!["nested", "inner"]);
update_empty!(path!["nested", "inner", "deep"]);
update_empty!(path!["nested", "inner", "deep", "foo"]);
}
#[test]
fn update_not_found() {
use super::update;
let mut store = make_store();
assert!(update(&mut store, path!["bla"], own!(e "")).is_err());
assert!(update(&mut store, path!["binary", "245"], own!(e "")).is_err());
assert!(update(&mut store, path!["nested", "bla"], own!(e "")).is_err());
assert!(update(&mut store, path!["nested", "bla", "foo"], own!(e "")).is_err());
assert!(update(&mut store, path!["nested", "inner", "bla"], own!(e "")).is_err());
assert!(update(
&mut store,
path!["nested", "inner", "bla", "deep"],
own!(e "")
)
.is_err());
assert!(update(
&mut store,
path!["nested", "inner", "deep", "bla"],
own!(e "")
)
.is_err());
assert!(update(
&mut store,
path!["nested", "inner", "deep", "foo", "bla"],
own!(e "")
)
.is_err());
assert!(update(&mut store, path![""], own!(e "")).is_err());
}
#[test]
fn delete() {
delete!(
path!["binary"],
r#"{
"nested": {
"inner": {
"deep": {
"foo": "bar"
}
},
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#
);
delete!(
path!["sibling"],
r#"{
"binary": [ 245, 107, 95, 100 ],
"nested": {
"inner": {
"deep": {
"foo": "bar"
}
},
"sibling": "inner_sibling"
}
}"#
);
delete!(
path!["nested"],
r#"{
"binary": [ 245, 107, 95, 100 ],
"sibling": "outer_sibling"
}"#
);
delete!(
path!["nested", "sibling"],
r#"{
"binary": [ 245, 107, 95, 100 ],
"nested": {
"inner": {
"deep": {
"foo": "bar"
}
}
},
"sibling": "outer_sibling"
}"#
);
delete!(
path!["nested", "inner"],
r#"{
"binary": [ 245, 107, 95, 100 ],
"nested": {
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#
);
delete!(
path!["nested", "inner", "deep"],
r#"{
"binary": [ 245, 107, 95, 100 ],
"nested": {
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#
);
delete!(
path!["nested", "inner", "deep", "foo"],
r#"{
"binary": [ 245, 107, 95, 100 ],
"nested": {
"sibling": "inner_sibling"
},
"sibling": "outer_sibling"
}"#
);
}
#[test]
fn delete_not_found() {
use super::delete;
let mut store = make_store();
assert!(delete(&mut store, path!["bla"]).is_err());
assert!(delete(&mut store, path!["binary", "245"]).is_err());
assert!(delete(&mut store, path!["nested", "bla"]).is_err());
assert!(delete(&mut store, path!["nested", "bla", "foo"]).is_err());
assert!(delete(&mut store, path!["nested", "inner", "bla"]).is_err());
assert!(delete(&mut store, path!["nested", "inner", "bla", "deep"]).is_err());
assert!(delete(&mut store, path!["nested", "inner", "deep", "bla"]).is_err());
assert!(delete(&mut store, path!["nested", "inner", "deep", "foo", "bla"]).is_err());
assert!(delete(&mut store, path![""]).is_err());
}
}
| true |
eb4cf410dcf8e99cf45889f02a1943f8328b255d
|
Rust
|
NLincoln/worldgen
|
/src/main.rs
|
UTF-8
| 4,337 | 3.296875 | 3 |
[] |
no_license
|
extern crate rand;
use std::fmt::Display;
use rand::{Rng, ThreadRng};
/// A generic 2-component vector (used here for grid coordinates/sizes).
#[derive(Debug, Copy, Clone)]
struct Vector2<T> {
    x: T,
    y: T,
}
// Shorthand literal constructor for `Vector2u`.
macro_rules! vec2u {
    ($x: expr, $y: expr) => (
        Vector2u {
            x: $x,
            y: $y
        }
    )
}
impl<T> Vector2<T> {
    /// Plain component-wise constructor.
    fn new(x: T, y: T) -> Vector2<T> {
        Vector2 { x: x, y: y }
    }
}
// Unsigned 32-bit specialization used throughout the grid code.
type Vector2u = Vector2<u32>;
/// One world cell; `is_filled` marks solid terrain.
struct Tile {
    is_filled: bool,
}
impl std::fmt::Display for Tile {
    /// Renders a filled tile as 'X' and an empty one as '_'.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let glyph = if self.is_filled { 'X' } else { '_' };
        write!(f, "{}", glyph)
    }
}
/// The generated world: currently just a tile grid.
struct World {
    grid: Grid<Tile>,
}
/// A dense 2D grid stored row-major in a flat vector
/// (index = y * size.x + x).
struct Grid<T> {
    size: Vector2u,            // width (x) and height (y)
    map: std::vec::Vec<T>,     // row-major cell storage, len == size.x * size.y
}
impl<T> Grid<T> {
    /// Returns a sub-grid of references covering `size` cells whose
    /// top-left corner is at `position`. Panics (via `at`) if any
    /// requested cell lies outside this grid.
    fn get_slice(&self, position: Vector2u, size: Vector2u) -> Grid<&T> {
        let mut grid = Grid {
            size: size,
            // Preallocate: the slice size is known up front.
            map: Vec::with_capacity((size.x * size.y) as usize),
        };
        for y in 0..size.y {
            for x in 0..size.x {
                grid
                    .map
                    .push(self.at(vec2u!(position.x + x, position.y + y)));
            }
        }
        return grid;
    }
    /// Returns a reference to the element at `pos`; panics when `pos`
    /// is out of bounds.
    fn at(&self, pos: Vector2u) -> &T {
        // Check each axis explicitly: with only a flat-index check, a
        // `pos.x >= size.x` would silently alias into the next row.
        if pos.x >= self.size.x || pos.y >= self.size.y {
            panic!("Position provided to Grid::at that is out of bounds");
        }
        &self.map[((pos.y * self.size.x) + pos.x) as usize]
    }
    /// Overwrites the element at `pos`; panics when `pos` is out of bounds.
    fn assign(&mut self, pos: Vector2u, val: T) {
        // Bug fix: the original used `>` instead of `>=`, accepting
        // `pos.x == size.x` / `pos.y == size.y` and letting the write
        // land on the wrong cell via the flattened index.
        if pos.x >= self.size.x || pos.y >= self.size.y {
            panic!("Position provided to Grid::assign that is out of bounds");
        }
        self.map[((pos.y * self.size.x) + pos.x) as usize] = val;
    }
}
impl<T> std::fmt::Display for Grid<T>
where
    T: std::fmt::Display,
{
    /// Writes the grid row by row, one line per row.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        for y in 0..self.size.y {
            for x in 0..self.size.x {
                write!(f, "{}", self.at(Vector2u::new(x, y)))?;
            }
            // Bug fix: the original used `print!("\n")`, which sent the
            // newline to stdout instead of the formatter — `format!` and
            // `to_string()` produced output with no line breaks.
            writeln!(f)?;
        }
        Ok(())
    }
}
/// Rewrites the `size`-sized rectangle at `position` cell by cell:
/// each cell is replaced by `f(grid, cell_position)`. Cells are visited
/// row by row, so `f` observes earlier replacements within the same call.
fn carve_room<T, F>(grid: &mut Grid<T>, position: Vector2u, size: Vector2u, f: F)
where
    F: Fn(&Grid<T>, Vector2u) -> T,
{
    for dy in 0..size.y {
        for dx in 0..size.x {
            let cell = vec2u!(position.x + dx, position.y + dy);
            let value = f(&grid, cell);
            grid.assign(cell, value);
        }
    }
}
impl World {
    /// Creates a `size.x` by `size.y` world where every tile is filled
    /// or empty at random (fair coin per tile).
    fn new(size: Vector2u) -> World {
        // One randomly filled/empty tile.
        fn create_tile(rng: &mut ThreadRng) -> Tile {
            Tile {
                is_filled: rng.gen::<bool>(),
            }
        }
        let mut map_vector = std::vec::Vec::<Tile>::new();
        let mut rng = rand::thread_rng();
        // Exact size is known: x * y tiles.
        map_vector.reserve_exact((size.x * size.y) as usize);
        for _ in 0..size.x {
            for _ in 0..size.y {
                map_vector.push(create_tile(&mut rng));
            }
        }
        World {
            grid: Grid {
                size: size,
                map: map_vector,
            },
        }
    }
}
/// Generates a 100x100 random world and prints it to stdout.
fn main() {
    let world = World::new(vec2u!(100, 100));
    println!("{}", world.grid);
}
// Unit tests for Grid access, slicing, assignment and carve_room.
#[cfg(test)]
mod tests {
    use super::*;
    // A 4x4 grid holding 0..16 row-major, used by every test.
    fn create_grid_uint() -> Grid<u32> {
        Grid {
            size: Vector2u { x: 4, y: 4 },
            map: vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
        }
    }
    #[test]
    fn test_carve_grid() {
        let mut grid = create_grid_uint();
        // Zero out the inner 2x2 square.
        carve_room(&mut grid, vec2u!(1, 1), vec2u!(2, 2), |grid, pos| {
            (*grid.at(pos) * 0)
        });
        assert_eq!(*grid.at(vec2u!(1, 1)), 0);
        assert_eq!(*grid.at(vec2u!(0, 1)), 4);
        // Increment every cell of the whole grid.
        carve_room(&mut grid, vec2u!(0, 0), vec2u!(4, 4), |grid, pos| {
            (*grid.at(pos) + 1)
        });
        assert_eq!(*grid.at(vec2u!(0, 0)), 1);
    }
    #[test]
    fn grid_slice_method_works() {
        let grid = create_grid_uint();
        let slice = grid.get_slice(Vector2u::new(1, 1), Vector2u::new(2, 2));
        assert_eq!(*slice.at(vec2u!(0, 0)), grid.at(vec2u!(1, 1)));
        assert_eq!(*slice.at(vec2u!(1, 0)), grid.at(vec2u!(2, 1)));
    }
    #[test]
    fn grid_assign_method() {
        let mut grid = create_grid_uint();
        let pos = vec2u!(1, 1);
        grid.assign(pos, 4);
        assert_eq!(*grid.at(pos), 4);
    }
    #[test]
    fn grid_at_method_works() {
        let grid = create_grid_uint();
        assert_eq!(*grid.at(Vector2u::new(1, 1)), 5);
        assert_eq!(*grid.at(Vector2u::new(0, 0)), 0);
        assert_eq!(*grid.at(Vector2u::new(3, 0)), 3);
    }
    #[test]
    #[should_panic]
    fn world_at_panics_on_out_of_bounds() {
        let grid = create_grid_uint();
        grid.at(Vector2u::new(5, 2));
    }
}
| true |
ad447e1fb85c133838e76cb5ea92fe48660096d4
|
Rust
|
JulianKnodt/mireba
|
/src/sampler/uniform.rs
|
UTF-8
| 579 | 2.703125 | 3 |
[] |
no_license
|
use super::Sampler;
use quick_maths::{DefaultFloat, Vector};
use rand::{rngs::SmallRng, Rng, SeedableRng};
// https://rust-random.github.io/rand/rand/trait.SeedableRng.html#method.from_entropy
// https://rust-random.github.io/rand/rand/rngs/struct.SmallRng.html
/// Sampler producing independent uniform values in [0, 1) from a
/// seeded `SmallRng` (fast, non-cryptographic).
#[derive(Debug)]
pub struct Uniform(SmallRng);
impl Sampler for Uniform {
  /// Seeds the internal RNG deterministically from `seed`.
  fn new(seed: u64) -> Self { Uniform(SmallRng::seed_from_u64(seed)) }
  /// One uniform sample; range/type come from `Rng::gen` for `DefaultFloat`.
  fn sample(&mut self) -> DefaultFloat { self.0.gen() }
  /// An N-vector of independent samples.
  fn sample_vec<const N: usize>(&mut self) -> Vector<N, DefaultFloat> {
    Vector::with(|_| self.sample())
  }
}
| true |
da0e4533eb3cd2d26e5f4c5ca058195bf4d3f7be
|
Rust
|
kemurphy/compiler
|
/src/mas/labels.rs
|
UTF-8
| 2,155 | 2.734375 | 3 |
[] |
no_license
|
use mas::ast::*;
use std::collections::BTreeMap;
/// Resolves a symbolic jump target in place: a `JumpLabel` becomes a
/// `JumpOffs` relative to the packet at `idx`; an existing `JumpOffs`
/// is left as-is. Panics on an unknown label.
fn subst_label(target: &mut JumpTarget, idx: usize,
               labels: &BTreeMap<String, usize>) {
    let resolved = match *target {
        JumpOffs(..) => target.clone(),
        JumpLabel(ref name) => {
            let label_idx = *labels
                .get(name)
                .unwrap_or_else(|| panic!("Unresolved label {}", name));
            // Offset is in packets, relative to the referencing packet.
            JumpOffs(label_idx as i32 - idx as i32)
        }
    };
    *target = resolved;
}
// TODO: eliminate this code duplication.
/// Resolves a symbolic long value in place: a `LabelOffs` becomes an
/// absolute `Immediate` byte address (`label index * 16`, plus the code
/// `offset` unless the label is `__STACK_START__`). Panics on an
/// unknown label.
fn subst_label_long(target: &mut LongValue,
                    labels: &BTreeMap<String, usize>,
                    offset: usize) {
    let new_target = match *target {
        Immediate(..) => target.clone(),
        LabelOffs(ref name) => {
            let label_idx = match labels.get(name) {
                Some(pos) => *pos,
                _ => panic!("Unresolved label {}", name),
            };
            // The offset here is in bytes, not packets, so we multiply by 16.
            // (Idiom fix: compare against a &str instead of allocating a
            // fresh String per call.)
            if name.as_str() == "__STACK_START__" {
                // This label doesn't move with the code, so we (hackily)
                // treat it differently.
                // TODO: we may later want more references outside the code
                // area, so we should be more systematic about this.
                Immediate((label_idx * 16) as u32)
            } else {
                Immediate((label_idx * 16 + offset) as u32)
            }
        }
    };
    *target = new_target;
}
/// Walks every instruction packet and replaces label references with
/// numeric values: relative packet offsets for branches, absolute byte
/// addresses (shifted by `offset`) for long immediates.
pub fn resolve_labels(insts: &mut Vec<InstPacket>,
                      labels: &BTreeMap<String, usize>,
                      offset: usize) {
    for (count, ref mut packet) in insts.iter_mut().enumerate() {
        for inst in packet.iter_mut() {
            match *inst {
                BranchImmInst(_, _, ref mut target) => {
                    // Branch targets are packet-relative to `count`.
                    subst_label(target, count, labels);
                },
                LongInst(ref mut target) => {
                    // Long values resolve to absolute byte addresses.
                    subst_label_long(target, labels, offset);
                },
                _ => {}
            }
        }
    }
}
| true |
6de3fd2204e954339cd58681e8f253bd645a181b
|
Rust
|
RimeHorn/unoloco
|
/alex/src/deck.rs
|
UTF-8
| 352 | 2.9375 | 3 |
[] |
no_license
|
use::card;
// Total cards in a fresh deck.
// NOTE(review): lowercase `size` violates Rust const naming
// (SCREAMING_SNAKE_CASE); renaming would touch other files, so only flagged.
const size :usize = 52;
/// A standard 52-card deck plus a cursor over the cards dealt so far.
#[derive(Copy, Clone,Getters)]
pub struct Deck {
	cards : [card::Card;size],
	index : usize, // number of cards that have already been dealt
}
impl Deck {
    /// Number of cards still remaining (total minus those dealt).
    fn size(&self) -> usize {
        return size - self.index;
    }
}
// deck() creates and initializes a set of cards
// dealcard() removes a card to hand it to a player
| true |
c9c3757f72cfe4cd86a8be37684039575608faa8
|
Rust
|
arizuk/programming-contest-solutions
|
/misc/n_p_k.rs
|
UTF-8
| 833 | 3.40625 | 3 |
[] |
no_license
|
/// Recursively selects each increasing k-combination of 0..n into
/// `res[0..k]`; once a combination is complete (`i == k - 1`), prints
/// every ordering of it by delegating to `_perm` with a printing
/// callback. Output goes to stdout, one permutation per line.
fn perm(i: usize, k: usize, cur: usize, n: usize, res: &mut Vec<usize>) {
    for j in cur..n {
        res[i] = j;
        if i == k - 1 {
            // `perm_res` maps slot -> chosen item index (-1 = free).
            let mut perm_res = vec![-1; k];
            let print_fn = |a: &Vec<isize>| {
                // `a[idx]` selects which combination element goes in each slot.
                for &idx in a.iter() {
                    print!("{} ", res[idx as usize]);
                }
                println!();
            };
            _perm(&mut perm_res, 0, k, &print_fn);
        } else {
            // Keep selections strictly increasing to avoid duplicates.
            perm(i + 1, k, j + 1, n, res);
        }
    }
}
/// Backtracking permutation generator: places item `i` into every free
/// slot of `res` (free = -1) and recurses; when all `res.len()` items
/// are placed, invokes `p` with the completed assignment, then undoes
/// the placement. `n` is the number of candidate slots to scan.
fn _perm(res: &mut Vec<isize>, i: usize, n: usize, p: &Fn(&Vec<isize>)) {
    if i == res.len() {
        p(res);
        return;
    }
    for slot in 0..n {
        if res[slot] != -1 {
            continue;
        }
        res[slot] = i as isize;
        _perm(res, i + 1, n, p);
        res[slot] = -1;
    }
}
/// Demo entry point: prints every 5P3 permutation to stdout.
fn main() {
    let n = 5;
    let k = 3;
    // Scratch buffer reused across the recursion; holds one combination.
    let mut res = vec![0; k];
    println!("Prints {}P{} permutation.", n, k);
    perm(0, k, 0, n, &mut res);
}
| true |
e73e98ec67571aad986a18072803654efcb87ed9
|
Rust
|
dustypomerleau/exercism
|
/rust/gigasecond/src/lib.rs
|
UTF-8
| 255 | 2.890625 | 3 |
[] |
no_license
|
use chrono::{DateTime, Duration, Utc};
/// Returns the UTC instant exactly one gigasecond (10^9 seconds)
/// after `start`. Panics on `DateTime` overflow.
pub fn after(start: DateTime<Utc>) -> DateTime<Utc> {
    let gigasecond = Duration::seconds(1_000_000_000);
    start.checked_add_signed(gigasecond).unwrap()
}
| true |
3ab20bdd947296872e929902c4a5158e9018eb57
|
Rust
|
gladiopeace/diana
|
/src/errors.rs
|
UTF-8
| 5,384 | 3.0625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
#![allow(missing_docs)]
pub use error_chain::bail;
use error_chain::error_chain;
// TODO fix the integration errors
// The `error_chain` setup for the whole crate
// All systems use these errors, except for GraphQL resolvers, because they have to return a particular kind of error
error_chain! {
// The custom errors for this crate (very broad)
errors {
/// An environment variable had an invalid type.
/// E.g. a port was given as a hex string for some reason.
InvalidEnvVarType(var_name: String, expected: String) {
description("invalid environment variable type")
display(
"invalid environment variable type for variable '{var_name}', expected '{expected}'",
var_name=var_name,
expected=expected
)
}
/// A required part of the GraphQL context was not found.
GraphQLContextNotFound(elem_name: String) {
description("required graphql context element not found")
display("required graphql context element '{}' not found", elem_name)
}
/// A Mutex was poisoned (if `.lock()` failed).
MutexPoisoned(mutex_name: String) {
description("mutex poisoned")
display("mutex '{}' poisoned", mutex_name)
}
/// The subscriptions server failed to publish data it was asked to. This error is usually caused by an authentication failure.
SubscriptionDataPublishFailed {
description("failed to publish data to the subscriptions server")
display("failed to publish data to the subscriptions server, this is most likely due to an authentication failure")
}
/// An invalid indicator string was used when trying to convert a timestring into a datetime.
InvalidDatetimeIntervalIndicator(indicator: String) {
description("invalid indicator in timestring")
display("invalid indicator '{}' in timestring, must be one of: s, m, h, d, w, M, y", indicator)
}
/// There was an unauthorised access attempt.
Unauthorised {
description("unauthorised access attempt")
display("unable to comply with request due to lack of valid and sufficient authentication")
}
/// One or more required builder fields weren't set up.
IncompleteBuilderFields {
description("not all required builder fields were instantiated")
display("some required builder fields haven't been instantiated")
}
/// The creation of an HTTP response for Lambda or its derivatives failed.
HttpResponseBuilderFailed {
description("the builder for an http response (netlify_lambda_http) returned an error")
display("the builder for an http response (netlify_lambda_http) returned an error")
}
/// There was an attempt to create a subscriptions server without declaring its existence or configuration in the [Options].
InvokedSubscriptionsServerWithInvalidOptions {
description("you tried to create a subscriptions server without configuring it in the options")
display("you tried to create a subscriptions server without configuring it in the options")
}
/// There was an attempt to initialize the GraphiQL playground in a production environment.
AttemptedPlaygroundInProduction {
description("you tried to initialize the GraphQL playground in production, which is not supported due to authentication issues")
display("you tried to initialize the GraphQL playground in production, which is not supported due to authentication issues")
}
/// There was an error in one of the integrations.
IntegrationError(message: String, integration_name: String) {
description("an error occurred in one of Diana's integration libraries")
display(
"the following error occurred in the '{integration_name}' integration library: {message}",
integration_name=integration_name,
message=message
)
}
}
// We work with many external libraries, all of which have their own errors
foreign_links {
Io(::std::io::Error);
EnvVar(::std::env::VarError);
Reqwest(::reqwest::Error);
Json(::serde_json::Error);
JsonWebToken(::jsonwebtoken::errors::Error);
}
}
/// A wrapper around [`async_graphql::Result<T>`](async_graphql::Result).
/// You should use this as the return type for any of your own schemas that might return errors.
/// # Example
/// ```rust
/// use diana::errors::GQLResult;
///
/// async fn api_version() -> GQLResult<String> {
/// // Your code here
/// Ok("test".to_string())
/// }
/// ```
pub type GQLResult<T> = async_graphql::Result<T>;
/// A wrapper around [`async_graphql::Error`].
/// If any of your schemas need to explicitly create an error that only exists in them (and you're not using something like [mod@error_chain]),
/// you should use this.
/// # Example
/// ```rust
/// use diana::errors::{GQLResult, GQLError};
///
/// async fn api_version() -> GQLResult<String> {
/// let err = GQLError::new("Test error!");
/// // Your code here
/// Err(err)
/// }
/// ```
pub type GQLError = async_graphql::Error;
| true |
15d60d1a3a906caa836732b50d8a43745dcc6040
|
Rust
|
polyfloyd/audio-thing
|
/src/player/playback.rs
|
UTF-8
| 17,977 | 2.703125 | 3 |
[] |
no_license
|
use crate::audio::*;
use crate::filter::*;
use crate::player::output;
use sample;
use std::sync::{Arc, Condvar, Mutex};
use std::*;
/// Playback notifications delivered to the registered event handler.
#[derive(Debug)]
pub enum Event {
    Position(u64),          // playback position update (sample count)
    State(State),           // play/pause/stop state change
    Tempo(f64),             // tempo factor change
    Output(output::Event),  // event forwarded from the output backend
}
/// The three mutually exclusive playback states.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum State {
    Playing,
    Paused,
    Stopped,
}
/// A running playback session: the output stream plus the shared handles
/// used to control and observe the audio pipeline from other threads.
pub struct Playback {
    pub stream: Box<output::Stream>,
    sample_rate: u32,
    // Condvar + state pair shared with the source chain to pause/resume it.
    flow_state: Arc<(Condvar, Mutex<State>)>,
    // Running count of samples played, shared with the counting filter.
    sample_counter: Arc<Mutex<u64>>,
    // Present only when the pipeline supports tempo adjustment.
    tempo: Option<Arc<Mutex<f64>>>,
    // Present only for seekable sources.
    seekable: Option<Arc<Mutex<Seekable + Send>>>,
    event_handler: Arc<Fn(Event) + Send + Sync>,
}
impl Playback {
/// Initializes a new Playback. Playback should be started manually by setting the playstate to
/// Playing.
pub fn new(
audio: dynam::Audio,
output: &output::Output,
event_handler: Arc<Fn(Event) + Send + Sync>,
) -> Playback {
match audio {
dynam::Audio::Source(source) => Playback::from_source(source, output, event_handler),
dynam::Audio::Seek(seek) => Playback::from_seek(seek, output, event_handler),
}
}
fn from_source(
source: dynam::Source,
output: &output::Output,
event_handler: Arc<Fn(Event) + Send + Sync>,
) -> Playback {
let flow_state = Arc::new((Condvar::new(), Mutex::new(State::Paused)));
let sample_counter = Arc::new(Mutex::new(0));
fn with_control<I>(
source: I,
fs: &Arc<(Condvar, Mutex<State>)>,
sc: &Arc<Mutex<u64>>,
) -> Box<Source<Item = I::Item> + Send>
where
I: Source + Send + 'static,
I::Item: sample::Frame,
{
Box::from(source.flow_control(fs.clone()).count_samples(sc.clone()))
}
let source_out = match source {
dynam::Source::MonoI8(s) => {
dynam::Source::MonoI8(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoU8(s) => {
dynam::Source::MonoU8(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoI16(s) => {
dynam::Source::MonoI16(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoU16(s) => {
dynam::Source::MonoU16(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoI24(s) => {
dynam::Source::MonoI24(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoU24(s) => {
dynam::Source::MonoU24(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoI32(s) => {
dynam::Source::MonoI32(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoU32(s) => {
dynam::Source::MonoU32(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoI64(s) => {
dynam::Source::MonoI64(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoU64(s) => {
dynam::Source::MonoU64(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoF32(s) => {
dynam::Source::MonoF32(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::MonoF64(s) => {
dynam::Source::MonoF64(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoI8(s) => {
dynam::Source::StereoI8(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoU8(s) => {
dynam::Source::StereoU8(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoI16(s) => {
dynam::Source::StereoI16(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoU16(s) => {
dynam::Source::StereoU16(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoI24(s) => {
dynam::Source::StereoI24(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoU24(s) => {
dynam::Source::StereoU24(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoI32(s) => {
dynam::Source::StereoI32(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoU32(s) => {
dynam::Source::StereoU32(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoI64(s) => {
dynam::Source::StereoI64(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoU64(s) => {
dynam::Source::StereoU64(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoF32(s) => {
dynam::Source::StereoF32(with_control(s, &flow_state, &sample_counter))
}
dynam::Source::StereoF64(s) => {
dynam::Source::StereoF64(with_control(s, &flow_state, &sample_counter))
}
};
let eh_sub = event_handler.clone();
let sub_handler = Arc::new(move |event| {
if let output::Event::End = event {
eh_sub(Event::State(State::Stopped));
}
eh_sub(Event::Output(event));
});
Playback {
sample_rate: source_out.sample_rate(),
stream: output.consume(source_out, sub_handler).unwrap(),
flow_state,
sample_counter,
tempo: None,
seekable: None,
event_handler,
}
}
fn from_seek(
seek: dynam::Seek,
output: &output::Output,
event_handler: Arc<Fn(Event) + Send + Sync>,
) -> Playback {
let flow_state = Arc::new((Condvar::new(), Mutex::new(State::Paused)));
let sample_counter = Arc::new(Mutex::new(0));
let tempo = Arc::new(Mutex::new(1.0));
fn with_control<I>(
seek: I,
fs: &Arc<(Condvar, Mutex<State>)>,
sc: &Arc<Mutex<u64>>,
t: &Arc<Mutex<f64>>,
) -> (
Box<Source<Item = I::Item> + Send>,
Arc<Mutex<Seekable + Send>>,
)
where
I: Seek + Send + 'static,
I::Item: sample::Frame + Send,
<I::Item as sample::Frame>::Float: Send,
<I::Item as sample::Frame>::Sample: sample::ToSample<f64>
+ sample::FromSample<f64>
+ sample::FromSample<
<<I::Item as sample::Frame>::Float as sample::Frame>::Sample,
> + Send
+ 'static,
{
let shared_seek = seek.shared();
let mut_seek = shared_seek.input.clone();
let source_out = shared_seek
.stft(1024)
.adjust_tempo(t.clone())
.inverse()
.flow_control(fs.clone())
.count_samples(sc.clone());
(Box::from(source_out), mut_seek)
}
let (source_out, mut_seek) = match seek {
dynam::Seek::MonoI8(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoI8(o), m)
}
dynam::Seek::MonoU8(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoU8(o), m)
}
dynam::Seek::MonoI16(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoI16(o), m)
}
dynam::Seek::MonoU16(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoU16(o), m)
}
dynam::Seek::MonoI24(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoI24(o), m)
}
dynam::Seek::MonoU24(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoU24(o), m)
}
dynam::Seek::MonoI32(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoI32(o), m)
}
dynam::Seek::MonoU32(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoU32(o), m)
}
dynam::Seek::MonoI64(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoI64(o), m)
}
dynam::Seek::MonoU64(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoU64(o), m)
}
dynam::Seek::MonoF32(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoF32(o), m)
}
dynam::Seek::MonoF64(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::MonoF64(o), m)
}
dynam::Seek::StereoI8(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoI8(o), m)
}
dynam::Seek::StereoU8(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoU8(o), m)
}
dynam::Seek::StereoI16(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoI16(o), m)
}
dynam::Seek::StereoU16(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoU16(o), m)
}
dynam::Seek::StereoI24(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoI24(o), m)
}
dynam::Seek::StereoU24(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoU24(o), m)
}
dynam::Seek::StereoI32(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoI32(o), m)
}
dynam::Seek::StereoU32(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoU32(o), m)
}
dynam::Seek::StereoI64(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoI64(o), m)
}
dynam::Seek::StereoU64(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoU64(o), m)
}
dynam::Seek::StereoF32(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoF32(o), m)
}
dynam::Seek::StereoF64(s) => {
let (o, m) = with_control(s, &flow_state, &sample_counter, &tempo);
(dynam::Source::StereoF64(o), m)
}
};
let eh_sub = event_handler.clone();
let sub_handler = Arc::new(move |event| {
if let output::Event::End = event {
eh_sub(Event::State(State::Stopped));
}
eh_sub(Event::Output(event));
});
Playback {
sample_rate: source_out.sample_rate(),
stream: output.consume(source_out, sub_handler).unwrap(),
flow_state,
sample_counter,
tempo: Some(tempo),
seekable: Some(mut_seek),
event_handler,
}
}
/// Returns the total number of samples in of the playing audio if known.
pub fn duration(&self) -> Option<u64> {
self.seekable
.as_ref()
.map(|s| (*s.lock().unwrap()).length())
}
/// Returns the duration as a Duration if known.
pub fn duration_time(&self) -> Option<time::Duration> {
self.duration()
.map(|num_samples| duration_of(self.sample_rate, num_samples))
}
    /// Returns the position of the sample that will be read next.
    /// If the audio is infinite, this will simply be the total number of samples played.
    pub fn position(&self) -> u64 {
        // Prefer the seekable input's own cursor; fall back to the count of
        // frames consumed so far for unseekable sources.
        self.seekable
            .as_ref()
            .map(|s| s.lock().unwrap().current_position())
            .unwrap_or_else(|| *self.sample_counter.lock().unwrap())
    }
    /// Seeks to the sample at the specified position. If seeking is not supported, this is a
    /// no-op.
    ///
    /// NOTE(review): a failed seek currently panics via the `unwrap()` below;
    /// the FIXME suggests proper error propagation is still pending.
    pub fn set_position(&mut self, position: u64) {
        self.seekable
            .as_ref()
            .map(|s| s.lock().unwrap().seek(position))
            .unwrap_or(Ok(()))
            .unwrap(); // FIXME
        // Listeners are notified even when the input is not seekable.
        (self.event_handler)(Event::Position(position));
    }
/// Seeks using a duration.
pub fn set_position_time(&mut self, timestamp: time::Duration) {
let secs = timestamp.as_secs() * u64::from(self.sample_rate);
self.set_position(secs);
}
    /// Returns the current position as a Duration.
    // Converts the sample-index position using the cached sample rate.
    pub fn position_time(&self) -> time::Duration {
        duration_of(self.sample_rate, self.position())
    }
    /// Returns the current flow state (playing/paused/stopped).
    pub fn state(&self) -> State {
        *self.flow_state.1.lock().unwrap()
    }
    /// Requests a state change and wakes any reader blocked on a pause.
    pub fn set_state(&mut self, state: State) {
        let &(ref cvar, ref lock) = &*self.flow_state;
        let mut cur_state = lock.lock().unwrap();
        // Stopped is terminal: a stopped playback can not be restarted.
        if *cur_state != State::Stopped {
            *cur_state = state;
        }
        cvar.notify_all();
        // NOTE(review): the handler is told `state` even when the transition
        // was refused because playback had already stopped — confirm intended.
        (self.event_handler)(Event::State(state));
    }
pub fn tempo(&self) -> f64 {
self.tempo
.as_ref()
.map(|t| *t.lock().unwrap())
.unwrap_or(1.0)
}
    /// Sets the tempo for the currently playing audio.
    /// This is a no-op if the tempo of the audio can not be altered or the tempo specified is
    /// invalid: `tempo <= 0.0`.
    pub fn set_tempo(&mut self, tempo: f64) {
        // `NaN > 0.0` is false, so NaN is rejected here as well.
        if tempo > 0.0 {
            if let Some(ref t) = self.tempo {
                *t.lock().unwrap() = tempo;
                // Only notify when the tempo was actually applied.
                (self.event_handler)(Event::Tempo(tempo));
            }
        }
    }
}
/// FlowControl acts as a part of a signal pipeline allowing the flow to be paused and stopped.
/// Because pausing works by blocking any calls to next, `FlowControl` provides its own concurrency
/// method instead of recommending `audio::Shared`.
struct FlowControl<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    // Shared (Condvar, state) pair; the Condvar unblocks `next` when the
    // state leaves `Paused`.
    pub state: Arc<(Condvar, Mutex<State>)>,
    input: S,
}
impl<S> iter::Iterator for FlowControl<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    type Item = S::Item;
    // Blocks while paused, ends the stream when stopped, and otherwise
    // forwards frames from the wrapped source.
    fn next(&mut self) -> Option<Self::Item> {
        let &(ref cvar, ref lock) = &*self.state;
        let mut state = lock.lock().unwrap();
        // Sleep on the condvar until another thread moves us out of Paused.
        while *state == State::Paused {
            state = cvar.wait(state).unwrap();
        }
        match *state {
            State::Paused => unreachable!(),
            State::Stopped => None,
            State::Playing => {
                let f = self.input.next();
                // When the source runs dry, latch Stopped so later calls
                // return None without touching the exhausted source again.
                if f.is_none() {
                    *state = State::Stopped;
                }
                f
            }
        }
    }
}
impl<S> Source for FlowControl<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    // Pass-through: flow control does not alter the sample rate.
    fn sample_rate(&self) -> u32 {
        self.input.sample_rate()
    }
}
impl<S> Drop for FlowControl<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    // If the state is set to paused, another thread attempting to read from the stream is blocked.
    // Here, we set the state to stopped when this FlowControl is dropped, so that the reading
    // thread will never deadlock.
    fn drop(&mut self) {
        let &(ref cvar, ref lock) = &*self.state;
        *lock.lock().unwrap() = State::Stopped;
        // Wake every waiter so they observe the Stopped state immediately.
        cvar.notify_all();
    }
}
/// Extension trait adding `flow_control` to every `Source`, in the style of
/// iterator adaptors.
trait IntoFlowControl: Source + Sized
where
    Self::Item: sample::Frame,
{
    // Wraps `self` so its frame flow obeys the shared play/pause/stop state.
    fn flow_control(self, state: Arc<(Condvar, Mutex<State>)>) -> FlowControl<Self> {
        FlowControl { state, input: self }
    }
}
// Blanket impl: any frame source can be flow-controlled.
impl<T> IntoFlowControl for T
where
    T: Source,
    T::Item: sample::Frame,
{
}
/// Pass-through source adaptor that counts every frame it yields into a
/// shared counter.
struct SampleCounter<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    // Shared tally of frames produced so far.
    pub counter: Arc<Mutex<u64>>,
    input: S,
}
impl<S> iter::Iterator for SampleCounter<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    type Item = S::Item;
    // Forwards frames from the inner source, bumping the shared counter for
    // every frame actually produced (never for the terminating None).
    fn next(&mut self) -> Option<Self::Item> {
        match self.input.next() {
            Some(frame) => {
                *self.counter.lock().unwrap() += 1;
                Some(frame)
            }
            None => None,
        }
    }
}
impl<S> Source for SampleCounter<S>
where
    S: Source,
    S::Item: sample::Frame,
{
    // Pass-through: counting does not alter the sample rate.
    fn sample_rate(&self) -> u32 {
        self.input.sample_rate()
    }
}
/// Extension trait adding `count_samples` to every `Source`.
trait IntoSampleCounter: Source + Sized
where
    Self::Item: sample::Frame,
{
    // Wraps `self` so each produced frame increments the shared counter.
    fn count_samples(self, counter: Arc<Mutex<u64>>) -> SampleCounter<Self> {
        SampleCounter {
            counter,
            input: self,
        }
    }
}
// Blanket impl: any frame source can be counted.
impl<T> IntoSampleCounter for T
where
    T: Source,
    T::Item: sample::Frame,
{
}
| true |
b0d9e9d582f3bd190ed9f6d3ed52f0fb125ca0e6
|
Rust
|
takaya0/template
|
/rust/snip-eratos.rs
|
UTF-8
| 258 | 3.15625 | 3 |
[] |
no_license
|
/// Trial-division primality test.
///
/// Returns `true` when `num` is prime. Checks divisors up to `sqrt(num)`,
/// so it runs in O(sqrt(n)).
///
/// Bug fixed: the previous version only rejected `num <= 0`, so `1` was
/// incorrectly reported as prime.
#[allow(non_snake_case)] // keep the original public name for callers
fn Is_prime(num: i32) -> bool {
    // 0, 1 and all negative numbers are not prime.
    if num <= 1 {
        return false;
    }
    let mut r = 2;
    while r * r <= num {
        if num % r == 0 {
            return false;
        }
        r += 1;
    }
    true
}
| true |
d5e3795cc444e6969088a40f784df70fb68ee9a2
|
Rust
|
gwy15/leetcode
|
/src/1103.分糖果-ii.rs
|
UTF-8
| 1,140 | 3.0625 | 3 |
[] |
no_license
|
/*
* @lc app=leetcode.cn id=1103 lang=rust
*
* [1103] 分糖果 II
*/
struct Solution;
// @lc code=start
impl Solution {
    /// Closed-form candy distribution.
    ///
    /// Gift sizes are 1, 2, 3, ..., so after `p` gifts a total of
    /// `p * (p + 1) / 2` candies has been handed out; `p` is recovered from
    /// `candies` by solving that quadratic, and the leftover goes to the
    /// next person in line.
    #[allow(unused)]
    pub fn distribute_candies(candies: i32, num_people: i32) -> Vec<i32> {
        // Largest p with p(p+1)/2 <= candies, via the quadratic formula.
        let p = (candies as f64 * 2.0 + 0.25).sqrt() - 0.5;
        let p = p.floor() as i32;
        // Candies left over after the p complete gifts.
        let remaining = candies - p * (p + 1) / 2;
        // rows = complete passes over everyone; cols = people already served
        // in the final, partial pass.
        let (rows, cols) = (p / num_people, p % num_people);
        // distribute
        let mut ans = vec![0; num_people as usize];
        // In full round r (0-based), person i receives (i+1) + r*num_people.
        // Summed over the complete rows this is rows*(i+1) plus this
        // index-independent term.
        let common = num_people * rows * (rows - 1) / 2;
        for i in 0..num_people {
            ans[i as usize] = (i + 1) * rows + common;
            if i < cols {
                ans[i as usize] += i + 1 + rows * num_people;
            }
        }
        // The person right after the last full gift receives the remainder.
        ans[cols as usize] += remaining;
        ans
    }
}
// @lc code=end
#[test]
fn test_solution() {
macro_rules! test {
($candies:expr, $n:expr, $ans:tt) => {
assert_eq!(
Solution::distribute_candies($candies, $n),
vec!$ans
);
}
};
test!(7, 4, [1, 2, 3, 1]);
test!(10, 3, [5, 2, 3]);
}
| true |
071d0dd3749d0ab5aa9ff8590f28327fdc91f301
|
Rust
|
quark-zju/cargo-fixeq
|
/src/parse_code.rs
|
UTF-8
| 3,311 | 3.390625 | 3 |
[
"MIT"
] |
permissive
|
//! Parse Rust source code.
use proc_macro2::{LineColumn, TokenTree};
use syn::{spanned::Spanned, visit::Visit, ExprMacro, Ident};
/// Locates every `assert_eq!` invocation in the given Rust source code.
///
/// Source that fails to parse yields an empty list.
pub(crate) fn find_assert_eqs(code: &str) -> Vec<AssertEqLocation> {
    let mut visitor = AssertEqVisitor::default();
    match syn::parse_file(code) {
        Ok(file) => visitor.visit_file(&file),
        Err(_) => {}
    }
    visitor.out
}
/// A source span delimited by a start and end [`LineColumn`] position.
#[derive(Clone)]
pub(crate) struct Location {
    pub(crate) start: LineColumn,
    pub(crate) end: LineColumn,
}
/// Locations of one `assert_eq!` call: the whole macro invocation, plus the
/// start of its second ("expected") argument.
#[derive(Debug, Clone)]
pub(crate) struct AssertEqLocation {
    pub(crate) assert: Location,
    pub(crate) rhs: Location,
}
/// Syntax-tree visitor that collects an [`AssertEqLocation`] for every
/// `assert_eq!` it encounters.
#[derive(Default)]
struct AssertEqVisitor {
    out: Vec<AssertEqLocation>,
}
impl<'ast> Visit<'ast> for AssertEqVisitor {
    /// Records the span of each `assert_eq!` macro and of its second argument.
    fn visit_expr_macro(&mut self, i: &'ast ExprMacro) {
        let path = &i.mac.path;
        if path.is_ident(&Ident::new("assert_eq", path.span())) {
            let mut start = None;
            let mut end = None;
            let mut seen_comma = 0;
            for token in i.mac.tokens.clone() {
                if let TokenTree::Punct(ref p) = token {
                    if p.as_char() == ',' {
                        seen_comma += 1;
                        continue;
                    }
                }
                // assert_eq!(actual , expected , message, ...)
                //             ^        ^          ^
                //             |        start      seen_comma=2
                //             seen_comma=1        end
                // Only tokens between the first and second comma (the
                // "expected" argument) are considered. Only the *first*
                // token of that argument is recorded: both `start` and `end`
                // are set once and never updated (the unit tests rely on
                // this behavior for multi-token expressions).
                if seen_comma == 1 {
                    // Was `start.is_none() && seen_comma == 1`; the second
                    // test is redundant inside this branch.
                    if start.is_none() {
                        start = Some(token.span().start());
                    }
                    if end.is_none() {
                        end = Some(token.span().end());
                    }
                }
            }
            if let (Some(start), Some(end)) = (start, end) {
                let rhs = Location { start, end };
                let assert = Location {
                    start: i.span().start(),
                    end: i.span().end(),
                };
                self.out.push(AssertEqLocation { assert, rhs });
            }
        }
    }
}
impl Location {
    /// Whether the (inclusive) line range of this span contains `line`.
    pub(crate) fn overlaps_line(&self, line: usize) -> bool {
        (self.start.line..=self.end.line).contains(&line)
    }
}
use std::fmt;
// Compact `startline,startcol-endline,endcol` rendering, matched verbatim by
// the unit tests below.
impl fmt::Debug for Location {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{},{}-{},{}",
            self.start.line, self.start.column, self.end.line, self.end.column
        )
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_find_assert_eqs() {
assert_eq!(
format!(
"{:#?}",
find_assert_eqs(
r#"
fn eq<T: Eq>(a: T, b: T) -> bool {
a == b
}
fn main() {
// single line
assert_eq!(true, true);
// multi-line
assert_eq!(
eq(1, 2),
eq(
eq(1, 2),
eq(2, 2),
),
);
}"#
)
),
r#"[
AssertEqLocation {
assert: 8,4-8,26,
rhs: 8,21-8,25,
},
AssertEqLocation {
assert: 11,4-17,5,
rhs: 13,8-13,10,
},
]"#
);
}
}
| true |
1e889515215e61838b3eba8dd9cdae97528018d0
|
Rust
|
Giovan/lumen
|
/lumen_runtime/src/otp/erlang/tests/is_boolean_1/with_atom.rs
|
UTF-8
| 407 | 2.734375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::*;
#[test]
fn without_true_or_false_is_false() {
    // Atoms other than `true`/`false` are not booleans in Erlang.
    let term = atom_unchecked("atom");
    assert_eq!(erlang::is_boolean_1(term), false.into());
}
#[test]
fn with_true_is_true() {
    // The atom `true` is a boolean.
    let term = true.into();
    assert_eq!(erlang::is_boolean_1(term), true.into());
}
#[test]
fn with_false_is_true() {
    // The atom `false` is a boolean as well.
    let term = false.into();
    assert_eq!(erlang::is_boolean_1(term), true.into());
}
| true |
6fc6682989665cc64b3ff4bb81b7c7967faa39b1
|
Rust
|
ccin2p3/actiondb
|
/src/matcher/matcher/builder/builder.rs
|
UTF-8
| 1,165 | 2.765625 | 3 |
[] |
no_license
|
use matcher::pattern::{Pattern, PatternSource};
use matcher::pattern::testmessage::{self, TestMessage};
use matcher::Matcher;
use super::BuildError;
/// Assembles a `Matcher` from a stream of patterns, validating each
/// pattern's embedded test messages along the way.
pub struct Builder;
impl Builder {
    /// Drains every pattern out of `from` into `matcher`.
    ///
    /// After a pattern is registered, its test messages (removed from the
    /// pattern beforehand) are parsed back through the matcher; any failure
    /// aborts the build with a `BuildError`.
    pub fn drain_into(from: &mut PatternSource, matcher: &mut Matcher) -> Result<(), BuildError>{
        for pattern in from {
            let mut pattern = try!(pattern);
            let test_messages = Builder::extract_test_messages(&mut pattern);
            matcher.add_pattern(pattern);
            try!(Builder::check_test_messages(matcher, &test_messages));
        }
        Ok(())
    }
    // Removes and returns all test messages attached to the pattern.
    fn extract_test_messages(pattern: &mut Pattern) -> Vec<TestMessage> {
        let mut messages = Vec::new();
        while let Some(test_message) = pattern.pop_test_message() {
            messages.push(test_message);
        }
        messages
    }
    // Re-parses each test message with the matcher built so far and checks
    // the extracted key/value pairs against the message's expectations.
    fn check_test_messages(matcher: &Matcher, messages: &[TestMessage]) -> Result<(), BuildError> {
        for msg in messages {
            let result = try!(matcher.parse(msg.message()).ok_or(testmessage::Error::TestMessageDoesntMatch));
            try!(msg.test_pairs(result.pairs()));
        }
        Ok(())
    }
}
| true |
01fa25fbc54f0ab202a330b7dc0d592e922343a9
|
Rust
|
magurotuna/leetcode-rust
|
/src/bin/199.rs
|
UTF-8
| 1,809 | 3.421875 | 3 |
[] |
no_license
|
use leetcode_rust::tree_node::*;
struct Solution;
impl Solution {
pub fn right_side_view(root: Node) -> Vec<i32> {
// traverse binary tree inorder
let mut ans = Vec::new();
dfs(root, &mut ans, 0);
ans
}
}
/// In-order walk that records, for each depth, the value of the last node
/// visited at that depth. In-order visits same-depth nodes left to right,
/// so the value that survives per depth is the rightmost node's — exactly
/// the right-side view. Note: `take()` detaches children, so the walk is
/// destructive on the input tree.
fn dfs(node: Node, vec: &mut Vec<i32>, depth: usize) {
    if node.is_none() {
        return;
    }
    let node = node.unwrap();
    // First visit at this depth: grow the result with a placeholder.
    if vec.len() == depth {
        vec.push(-1); // update later
    }
    dfs(node.borrow_mut().left.take(), vec, depth + 1);
    vec[depth] = node.borrow().val;
    dfs(node.borrow_mut().right.take(), vec, depth + 1);
}
/// Entry point; the interesting code lives in the test module.
fn main() {}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn right_side_view() {
// 1
// / \
// 2 3
// \ \
// 5 4
let tree = make_node_with(
1,
make_node_with(2, None, make_node(5)),
make_node_with(3, None, make_node(4)),
);
assert_eq!(Solution::right_side_view(tree), vec![1, 3, 4]);
// 1
// / \
// 2 3
// \
// 5
let tree = make_node_with(1, make_node_with(2, None, make_node(5)), make_node(3));
assert_eq!(Solution::right_side_view(tree), vec![1, 3, 5]);
// 1
// /
// 2
// \
// 5
let tree = make_node_with(1, make_node_with(2, None, make_node(5)), None);
assert_eq!(Solution::right_side_view(tree), vec![1, 2, 5]);
// 1
// /
// 2
let tree = make_node_with(1, make_node(2), None);
assert_eq!(Solution::right_side_view(tree), vec![1, 2]);
// 1
let tree = make_node(1);
assert_eq!(Solution::right_side_view(tree), vec![1]);
}
}
| true |
5aeeed4d446ae7a468741963b0b131c33dd4d813
|
Rust
|
cataclysm-mods/manager
|
/src/cli.rs
|
UTF-8
| 1,537 | 2.796875 | 3 |
[] |
no_license
|
use clap::{App, Arg, SubCommand, crate_authors, crate_version};
use crate::NAME;
/// When called, parses ARGV using Clap
///
/// Example:
/// ```
/// use cataclysm_manager::cli::parse_arguments;
/// let args = vec!["cataclysm-manager", "--version"];
/// let argv = args.iter().map(|s| s.to_string()).collect();
/// let matches = parse_arguments(argv);
/// assert_eq!(matches.is_present("version"), true);
/// assert_eq!(matches.occurrences_of("version"), 1);
/// assert_eq!(matches.occurrences_of("help"), 0);
/// ```
pub fn parse_arguments(argv: Vec<String>) -> clap::ArgMatches<'static> {
    let app = App::new(NAME)
        .author(crate_authors!())
        .version(crate_version!())
        // Global logging verbosity, restricted to the known level names.
        .arg(
            Arg::with_name("log-level")
                .help("Log level")
                .long("log-level")
                .default_value("info")
                .possible_values(&["error", "warn", "info", "debug", "trace"]))
        // `releases list <RELEASE_TYPE>` — query available game releases.
        .subcommand(
            SubCommand::with_name("releases")
                .about("List or download available game releases.")
                .subcommand(
                    SubCommand::with_name("list")
                        .about("List available game releases.")
                        .arg(
                            Arg::with_name("RELEASE_TYPE")
                                .required(true)
                                .help("Type of release to download.")
                                .possible_values(&["experimental", "stable"]))));
    app.get_matches_from(argv)
}
| true |
86a58df8344af2da0ad5a636a9117e8e04f38943
|
Rust
|
max-niederman/luthien
|
/plugins/lib/rust/src/io.rs
|
UTF-8
| 477 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
//! # Luthien Plugin IO
//!
//! This module provides functions for reading and writing from Luthien's stdio.
use crate::Input;
use ipipe::{OnCleanup, Pipe};
pub use ipipe;
impl Input {
    /// Get an [`ipipe::Pipe`] which can be used to read input and print output to the parent
    /// Luthien process.
    ///
    /// Returns `None` when no pipe path was provided in the input; otherwise
    /// the result of opening the named pipe. The pipe is opened with
    /// `OnCleanup::NoDelete`, so the pipe file is not removed on drop.
    pub fn io(&self) -> Option<ipipe::Result<Pipe>> {
        self.pipe_path
            .as_ref()
            .map(|path| Pipe::open(path, OnCleanup::NoDelete))
    }
}
| true |
b2c9db27da6d5a68c5ea4fb176cda11188161952
|
Rust
|
Axect/Peroxide
|
/src/traits/math.rs
|
UTF-8
| 2,196 | 3.53125 | 4 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::structure::matrix::Matrix;
/// Mathematical Vector
///
/// # Description
/// Vector has two operations : addition, scalar multiplication.
/// And a space of the vector should closed for that operations.
pub trait Vector {
    type Scalar;
    /// Element-wise vector addition.
    fn add_vec<'a, 'b>(&'a self, rhs: &'b Self) -> Self;
    /// Element-wise vector subtraction.
    fn sub_vec<'a, 'b>(&'a self, rhs: &'b Self) -> Self;
    /// Multiplication by a scalar.
    fn mul_scalar(&self, rhs: Self::Scalar) -> Self;
}
/// Kinds of Vector & Matrix norm
///
/// # Kinds of Vector norm
/// * `l1`
/// * `l2`
/// * `lp`
/// * `lInf`
///
/// # Kinds of Matrix norm
/// * `F`: Frobenius norm
/// * `lpq`: Element-wise pq norm
#[derive(Debug, Copy, Clone)]
pub enum Norm {
    /// Sum of absolute values (taxicab norm).
    L1,
    /// Euclidean norm.
    L2,
    /// General p-norm with the given exponent.
    Lp(f64),
    /// Maximum absolute value (supremum norm).
    LInf,
    /// Frobenius norm (matrices).
    F,
    /// Element-wise L(p, q) norm (matrices).
    Lpq(f64, f64),
}
/// Normed Vector
///
/// A vector space equipped with a norm; `UnsignedScalar` is the norm's
/// (non-negative) result type.
pub trait Normed: Vector {
    type UnsignedScalar;
    /// Computes the norm of the requested kind.
    fn norm(&self, kind: Norm) -> Self::UnsignedScalar;
    /// Scales `self` to unit length under the requested norm.
    fn normalize(&self, kind: Norm) -> Self
    where
        Self: Sized;
}
/// Inner product Vector
pub trait InnerProduct: Normed {
    /// Inner (dot) product of `self` and `rhs`.
    fn dot(&self, rhs: &Self) -> Self::Scalar;
}
/// Linear operation for Vector
///
/// A linear map taking vectors of type `T` to vectors of type `S`.
pub trait LinearOp<T: Vector, S: Vector> {
    fn apply(&self, rhs: &T) -> S;
}
/// Vector Products
pub trait VectorProduct: Vector {
    /// Cross product.
    fn cross(&self, other: &Self) -> Self;
    /// Outer product, producing a matrix.
    fn outer(&self, other: &Self) -> Matrix;
}
/// Matrix Products
pub trait MatrixProduct {
    /// Kronecker (tensor) product.
    fn kronecker(&self, other: &Self) -> Matrix;
    /// Element-wise (Hadamard) product.
    fn hadamard(&self, other: &Self) -> Matrix;
}
// =============================================================================
// Implementation for primitive types
// =============================================================================
/// `f64` is a one-dimensional vector space over itself: vector addition and
/// scalar multiplication coincide with ordinary arithmetic.
impl Vector for f64 {
    type Scalar = Self;
    fn add_vec<'a, 'b>(&'a self, rhs: &'b Self) -> Self {
        self + rhs
    }
    fn sub_vec<'a, 'b>(&'a self, rhs: &'b Self) -> Self {
        self - rhs
    }
    fn mul_scalar(&self, rhs: Self::Scalar) -> Self {
        self * rhs
    }
}
impl Normed for f64 {
    type UnsignedScalar = f64;
    /// Absolute value: every norm kind degenerates to `|x|` for scalars.
    ///
    /// Consistency fix: the return type is now spelled `Self::UnsignedScalar`
    /// as declared by the `Normed` trait (it was written `Self::Scalar`,
    /// which only compiled because both resolve to `f64`).
    fn norm(&self, _kind: Norm) -> Self::UnsignedScalar {
        self.abs()
    }
    /// The sign of `self` (`±1.0`). Note: `0.0` yields `0.0 / 0.0 = NaN`.
    fn normalize(&self, _kind: Norm) -> Self
    where
        Self: Sized,
    {
        self / self.abs()
    }
}
| true |
beee2d78bdfca4ae6afaa610b8be40107baecff6
|
Rust
|
juliocmalvares/skypiea
|
/src/filters/invert.rs
|
UTF-8
| 1,199 | 2.9375 | 3 |
[
"MIT"
] |
permissive
|
extern crate image;
use image::{GenericImageView,RgbImage};
/// Color-inversion filter: holds the source image and produces a
/// photographic negative via `apply`.
#[derive(Debug)]
pub struct Invert {
    img: RgbImage
}
impl Invert {
    /// Wraps the image that will be inverted.
    pub fn new(img: RgbImage) -> Invert {
        Invert { img }
    }

    /// Produces a new image where every channel value `c` becomes `255 - c`
    /// (photographic negative). The source image is left untouched.
    pub fn apply(&self) -> RgbImage {
        let (width, height) = self.img.dimensions();
        let mut buffer: RgbImage = image::ImageBuffer::new(width, height);
        for (x, y, out) in buffer.enumerate_pixels_mut() {
            let src = self.img.get_pixel(x, y);
            *out = image::Rgb([u8::MAX - src[0], u8::MAX - src[1], u8::MAX - src[2]]);
        }
        buffer
    }
}
// #[test]
// #[warn(unused_must_use)]
// fn it_works () {
// let img = image::open("files/input/galaxy/andromeda.jpg").unwrap();
// let mut buffer: RgbImage = image::ImageBuffer::new(img.dimensions().0, img.dimensions().1);
// buffer = img.to_rgb();
// let grl = Invert::new(buffer);
// buffer = grl.apply();
// match buffer.save("files/output/andromeda-invert-test.png") {
// Ok(_) => (),
// Err(_) => panic!("Test Contrast failed")
// }
// }
| true |
1d9ed9a64e0b222ae64358ce191d6582f7a36161
|
Rust
|
pepyakin/chetyre
|
/servant/nrf51822/src/spis1/semstat.rs
|
UTF-8
| 2,230 | 2.796875 | 3 |
[] |
no_license
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::SEMSTAT {
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
}
#[doc = "Possible values of the field `SEMSTAT`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEMSTATR {
#[doc = "Semaphore is free."]
FREE,
#[doc = "Semaphore is assigned to the CPU."]
CPU,
#[doc = "Semaphore is assigned to the SPIS."]
SPIS,
#[doc = "Semaphore is assigned to the SPIS, but a handover to the CPU is pending."]
CPUPENDING,
}
impl SEMSTATR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
SEMSTATR::FREE => 0,
SEMSTATR::CPU => 1,
SEMSTATR::SPIS => 2,
SEMSTATR::CPUPENDING => 3,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> SEMSTATR {
match value {
0 => SEMSTATR::FREE,
1 => SEMSTATR::CPU,
2 => SEMSTATR::SPIS,
3 => SEMSTATR::CPUPENDING,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `FREE`"]
#[inline]
pub fn is_free(&self) -> bool {
*self == SEMSTATR::FREE
}
#[doc = "Checks if the value of the field is `CPU`"]
#[inline]
pub fn is_cpu(&self) -> bool {
*self == SEMSTATR::CPU
}
#[doc = "Checks if the value of the field is `SPIS`"]
#[inline]
pub fn is_spis(&self) -> bool {
*self == SEMSTATR::SPIS
}
#[doc = "Checks if the value of the field is `CPUPENDING`"]
#[inline]
pub fn is_cpupending(&self) -> bool {
*self == SEMSTATR::CPUPENDING
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:1 - Semaphore status."]
#[inline]
pub fn semstat(&self) -> SEMSTATR {
SEMSTATR::_from({
const MASK: u8 = 3;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
}
| true |
24144daaa73e92ed3caef7c250f2d3122ae9835f
|
Rust
|
46bit/advent-of-code
|
/2021/src/day22.rs
|
UTF-8
| 12,213 | 3.140625 | 3 |
[] |
no_license
|
use regex::Regex;
use std::fmt::Debug;
use std::collections::HashMap;
trait VoxelSpace {
type Voxel;
fn get(&self, x: i64, y: i64, z: i64) -> Self::Voxel;
fn act(
&mut self,
xs: impl Iterator<Item = i64>,
ys: impl Iterator<Item = i64>,
zs: impl Iterator<Item = i64>,
action: impl FnMut(&mut Self::Voxel),
);
}
// #[derive(Clone, Debug)]
// struct FiniteVoxelSpace<const SIZE: usize, VOXEL: Clone + Debug> {
// offset: i64,
// xyz_voxels: [[[VOXEL; SIZE]; SIZE]; SIZE],
// }
// impl<const SIZE: usize, VOXEL: Clone + Debug> VoxelSpace for FiniteVoxelSpace<SIZE, VOXEL> {
// type Voxel = VOXEL;
// fn get(&self, x: i64, y: i64, z: i64) -> Self::Voxel {
// println!("{} {} {}", x, y, z);
// let adjusted_x = x - self.offset;
// let adjusted_y = y - self.offset;
// let adjusted_z = z - self.offset;
// println!("{} {} {}", adjusted_x, adjusted_y, adjusted_z);
// self.xyz_voxels[adjusted_x as usize][adjusted_y as usize][adjusted_z as usize].clone()
// }
// fn act(
// &mut self,
// mut xs: impl Iterator<Item = i64>,
// mut ys: impl Iterator<Item = i64>,
// mut zs: impl Iterator<Item = i64>,
// mut action: impl FnMut(&mut Self::Voxel),
// ) {
// let max = SIZE as i64;
// for x in &mut xs {
// let adjusted_x = x - self.offset;
// for y in &mut ys {
// let adjusted_y = y - self.offset;
// for z in &mut zs {
// println!("{} {} {}", x, y, z);
// let adjusted_z = z - self.offset;
// if adjusted_x < 0 || adjusted_x > max {
// continue;
// }
// if adjusted_y < 0 || adjusted_y > max {
// continue;
// }
// if adjusted_z < 0 || adjusted_z > max {
// continue;
// }
// println!("adjusted: {} {} {}", adjusted_x, adjusted_y, adjusted_z);
// action(&mut self.xyz_voxels[adjusted_x as usize][adjusted_y as usize][adjusted_z as usize]);
// }
// }
// }
// }
// }
#[derive(Clone, Debug)]
struct InfiniteVoxelSpace<VOXEL: Clone + Debug + Default> {
xyz_voxels: HashMap<(i64, i64, i64), VOXEL>,
}
impl<VOXEL: Clone + Debug + Default> VoxelSpace for InfiniteVoxelSpace<VOXEL> {
type Voxel = VOXEL;
fn get(&self, x: i64, y: i64, z: i64) -> Self::Voxel {
if self.xyz_voxels.contains_key(&(x, y, z)) {
self.xyz_voxels[&(x, y, z)].clone()
} else {
Self::Voxel::default()
}
}
fn act(
&mut self,
mut xs: impl Iterator<Item = i64>,
mut ys: impl Iterator<Item = i64>,
mut zs: impl Iterator<Item = i64>,
mut action: impl FnMut(&mut Self::Voxel),
) {
for x in &mut xs {
for y in &mut ys {
for z in &mut zs {
let mut entry = self.xyz_voxels.entry((x, y, z)).or_insert(Self::Voxel::default());
action(&mut entry);
}
}
}
}
}
#[aoc(day22, part1)]
fn part1(input: &str) -> u64 {
// let mut voxel_space = InfiniteVoxelSpace{
// xyz_voxels: HashMap::new(),
// };
let mut voxels = HashMap::new();
let re = Regex::new(r"(?P<state>[a-z]+) x=(?P<x1>[-0-9]+)..(?P<x2>[-0-9]+),y=(?P<y1>[-0-9]+)..(?P<y2>[-0-9]+),z=(?P<z1>[-0-9]+)..(?P<z2>[-0-9]+)").unwrap();
for line in input.lines() {
let caps = re.captures(line).unwrap();
let state = match caps.name("state").unwrap().as_str() {
"on" => true,
"off" => false,
_ => panic!("input unparseable"),
};
//let f = |s: &mut bool| *s = state;
let x1 = caps.name("x1").unwrap().as_str().parse().unwrap();
let x2 = caps.name("x2").unwrap().as_str().parse().unwrap();
let xs = x1..=x2;
let y1 = caps.name("y1").unwrap().as_str().parse().unwrap();
let y2 = caps.name("y2").unwrap().as_str().parse().unwrap();
let ys = y1..=y2;
let z1 = caps.name("z1").unwrap().as_str().parse().unwrap();
let z2 = caps.name("z2").unwrap().as_str().parse().unwrap();
let zs = z1..=z2;
//voxel_space.act(xs, ys, zs, f);
for x in xs.clone() {
if x < -50 || x > 50 {
continue;
}
for y in ys.clone() {
if y < -50 || y > 50 {
continue;
}
for z in zs.clone() {
if z < -50 || z > 50 {
continue;
}
voxels.insert((x, y, z), state);
}
}
}
}
let mut number_of_cubes_on = 0;
for x in -50..=50 {
for y in -50..=50 {
for z in -50..=50 {
//if voxel_space.get(x, y, z) {
let key = (x, y, z);
if voxels.contains_key(&key) && voxels[&key] == true {
number_of_cubes_on += 1;
}
}
}
}
return number_of_cubes_on;
}
/// An axis-aligned cuboid with *inclusive* bounds on all three axes:
/// `left..=right` on x, `top..=bottom` on y, `rear..=fore` on z.
#[derive(Clone, Debug)]
struct Square {
    left: i64,
    right: i64,
    top: i64,
    bottom: i64,
    rear: i64,
    fore: i64,
}
impl Square {
    /// True if `self` and `other` overlap on all three (inclusive) axes.
    fn intersects(&self, other: &Square) -> bool {
        if other.right < self.left || self.right < other.left {
            return false;
        }
        if other.bottom < self.top || self.bottom < other.top {
            return false;
        }
        if other.fore < self.rear || self.fore < other.rear {
            return false;
        }
        true
    }
    // fn empty(&self) -> bool {
    //     self.left == self.right || self.top == self.bottom
    // }
    /// Enumerates every unit cell contained in this box. O(volume) — only
    /// used by the inline sanity checks in `part2`.
    fn squares(&self) -> Vec<(i64, i64, i64)> {
        let mut s = vec![];
        for x in self.left..=self.right {
            for y in self.top..=self.bottom {
                for z in self.rear..=self.fore {
                    s.push((x, y, z));
                }
            }
        }
        return s;
    }
    /// Number of unit cells in this box: the product of the three inclusive
    /// side lengths.
    fn squares_count(&self) -> u64 {
        let xn = (self.right - self.left + 1).abs() as u64;
        let yn = (self.bottom - self.top + 1).abs() as u64;
        let zn = (self.fore - self.rear + 1).abs() as u64;
        xn * yn * zn
    }
    /// Returns `self` minus `other` as a set of up to six disjoint boxes.
    ///
    /// `other` is first clamped into `self`'s bounds; the remainder is then
    /// carved off as full-height left/right slabs, then top/bottom slabs of
    /// the middle column, then rear/fore slabs of what is left.
    fn subtract(&self, mut other: Square) -> Vec<Square> {
        // No overlap: nothing to remove.
        if !self.intersects(&other) {
            return vec![self.clone()];
        }
        // Clamp `other` into `self` so the slab arithmetic below stays in bounds.
        if other.left < self.left {
            other.left = self.left;
        }
        if other.top < self.top {
            other.top = self.top;
        }
        if other.rear < self.rear {
            other.rear = self.rear;
        }
        if other.right > self.right {
            other.right = self.right;
        }
        if other.bottom > self.bottom {
            other.bottom = self.bottom;
        }
        if other.fore > self.fore {
            other.fore = self.fore;
        }
        let mut squares = vec![];
        // LEFT SQUARE
        if other.left > self.left {
            squares.push(Square {
                left: self.left,
                right: other.left - 1,
                top: self.top,
                bottom: self.bottom,
                rear: self.rear,
                fore: self.fore,
            });
        }
        // RIGHT SQUARE
        if self.right > other.right {
            squares.push(Square {
                left: other.right + 1,
                right: self.right,
                top: self.top,
                bottom: self.bottom,
                rear: self.rear,
                fore: self.fore,
            });
        }
        // TOP MIDDLE
        if other.top > self.top {
            squares.push(Square {
                left: other.left,
                right: other.right,
                top: self.top,
                bottom: other.top - 1,
                rear: self.rear,
                fore: self.fore,
            });
        }
        // BOTTOM MIDDLE
        if self.bottom > other.bottom {
            squares.push(Square {
                left: other.left,
                right: other.right,
                top: other.bottom + 1,
                bottom: self.bottom,
                rear: self.rear,
                fore: self.fore,
            });
        }
        // REAR MIDDLE
        if other.rear > self.rear {
            squares.push(Square {
                left: other.left,
                right: other.right,
                top: other.top,
                bottom: other.bottom,
                rear: self.rear,
                fore: other.rear - 1,
            });
        }
        // FORE MIDDLE
        if self.fore > other.fore {
            squares.push(Square {
                left: other.left,
                right: other.right,
                top: other.top,
                bottom: other.bottom,
                rear: other.fore + 1,
                fore: self.fore,
            });
        }
        return squares; //.into_iter().filter(Square::empty).collect();
    }
}
/// Counts "on" cells over the unclamped coordinate space by keeping the lit
/// region as a list of boxes: an "on" step appends its box, an "off" step
/// subtracts its box from every stored one. Overlap between the stored "on"
/// boxes themselves is removed in a final pass before summing volumes.
#[aoc(day22, part2)]
fn part2(input: &str) -> u64 {
    // Inline sanity checks of Square::subtract before processing real input.
    let s1 = Square {
        left: -5,
        right: 5,
        top: -5,
        bottom: 5,
        rear: 0,
        fore: 0,
    };
    assert_eq!(s1.squares().len(), 121);
    let s2 = Square {
        left: -2,
        right: 2,
        top: -2,
        bottom: 2,
        rear: 0,
        fore: 0,
    };
    assert_eq!(s2.squares().len(), 25);
    let s1s = s1.subtract(s2);
    assert_eq!(s1s.clone().into_iter().map(|s| s.squares().len()).sum::<usize>(), 121 - 25);
    // for s in s1s {
    //     println!("1. {:?}", s);
    //     println!("2. {:?}", s.squares());
    // }
    let mut squares = vec![];
    let re = Regex::new(r"(?P<state>[a-z]+) x=(?P<x1>[-0-9]+)..(?P<x2>[-0-9]+),y=(?P<y1>[-0-9]+)..(?P<y2>[-0-9]+),z=(?P<z1>[-0-9]+)..(?P<z2>[-0-9]+)").unwrap();
    for line in input.lines() {
        // Progress logging: `squares` can grow with every "off" step.
        println!("squares={} {}", squares.len(), line);
        let caps = re.captures(line).unwrap();
        let state = match caps.name("state").unwrap().as_str() {
            "on" => true,
            "off" => false,
            _ => panic!("input unparseable"),
        };
        let x1 = caps.name("x1").unwrap().as_str().parse().unwrap();
        let x2 = caps.name("x2").unwrap().as_str().parse().unwrap();
        let y1 = caps.name("y1").unwrap().as_str().parse().unwrap();
        let y2 = caps.name("y2").unwrap().as_str().parse().unwrap();
        let z1 = caps.name("z1").unwrap().as_str().parse().unwrap();
        let z2 = caps.name("z2").unwrap().as_str().parse().unwrap();
        let square = Square {
            left: x1,
            right: x2,
            top: y1,
            bottom: y2,
            rear: z1,
            fore: z2,
        };
        if state {
            // "on": just record the box; overlaps are resolved in the final pass.
            squares.push(square);
        } else {
            // "off": carve this box out of every stored box.
            squares = squares.into_iter().map(|s| s.subtract(square.clone())).flatten().collect();
        }
    }
    for s in squares.clone() {
        println!("1. {:?}", s);
        //println!(" {:?}", s.squares());
    }
    // deduplicate square overlap
    // (dead draft retained from development)
    // let mut cont = true;
    // while cont {
    //     for r1 in squares.clone() {
    //         for r2 in squares.clone() {
    //             if r1 == r2 {
    //                 squares = squares.into_iter().concat(r1.subtract(r2).into_iter()).collect();
    //             }
    //         }
    //     }
    // }
    // Final pass: make the boxes pairwise disjoint (each box minus every
    // earlier box, since the `i <= j` pairs are skipped) and sum volumes.
    let mut n = 0;
    for (i, square) in squares.clone().into_iter().enumerate() {
        let mut s3 = vec![square];
        for (j, r2) in squares.clone().into_iter().enumerate() {
            if i <= j {
                continue;
            }
            s3 = s3.into_iter().map(|r| r.subtract(r2.clone())).flatten().collect();
        }
        for z in s3 {
            n += z.squares_count();
        }
    }
    return n;
    //squares[0].subtract(squares[1])
    //return squares.into_iter().map(|s| s.squares_count()).sum();
}
| true |
3adaeb6248d1fbbb864bf3404da07a214e310e4e
|
Rust
|
alexcrichton/wasmtime
|
/crates/environ/src/component/info.rs
|
UTF-8
| 21,575 | 3.25 | 3 |
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
// General runtime type-information about a component.
//
// Compared to the `Module` structure for core wasm this type is pretty
// significantly different. The core wasm `Module` corresponds roughly 1-to-1
// with the structure of the wasm module itself, but instead a `Component` is
// more of a "compiled" representation where the original structure is thrown
// away in favor of a more optimized representation. The considerations for this
// are:
//
// * This representation of a `Component` avoids the need to create a
// `PrimaryMap` of some form for each of the index spaces within a component.
// This is less so an issue about allocations and moreso that this information
// generally just isn't needed any time after instantiation. Avoiding creating
// these altogether helps components be lighter weight at runtime and
// additionally accelerates instantiation.
//
// * Components can have arbitrary nesting and internally do instantiations via
// string-based matching. At instantiation-time, though, we want to do as few
// string-lookups in hash maps as much as we can since they're significantly
// slower than index-based lookups. Furthermore while the imports of a
// component are not statically known the rest of the structure of the
// component is statically known which enables the ability to track precisely
// what matches up where and do all the string lookups at compile time instead
// of instantiation time.
//
// * Finally by performing this sort of dataflow analysis we are capable of
// identifying what adapters need trampolines for compilation or fusion. For
// example this tracks when host functions are lowered which enables us to
// enumerate what trampolines are required to enter into a component.
// Additionally (eventually) this will track all of the "fused" adapter
// functions where a function from one component instance is lifted and then
// lowered into another component instance. Altogether this enables Wasmtime's
// AOT-compilation where the artifact from compilation is suitable for use in
// running the component without the support of a compiler at runtime.
//
// Note, however, that the current design of `Component` has fundamental
// limitations which it was not designed for. For example there is no feasible
// way to implement either importing or exporting a component itself from the
// root component. Currently we rely on the ability to have static knowledge of
// what's coming from the host which at this point can only be either functions
// or core wasm modules. Additionally one flat list of initializers for a
// component are produced instead of initializers-per-component which would
// otherwise be required to export a component from a component.
//
// For now this tradeoff is made as it aligns well with the intended use case
// for components in an embedding. This may need to be revisited though if the
// requirements of embeddings change over time.
use crate::component::*;
use crate::{EntityIndex, PrimaryMap, SignatureIndex};
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
/// Run-time-type-information about a `Component`, its structure, and how to
/// instantiate it.
///
/// This type is intended to mirror the `Module` type in this crate which
/// provides all the runtime information about the structure of a module and
/// how it works.
///
/// NB: Lots of the component model is not yet implemented in the runtime so
/// this is going to undergo a lot of churn.
#[derive(Default, Debug, Serialize, Deserialize)]
pub struct Component {
    /// A list of typed values that this component imports.
    ///
    /// Note that each name is given an `ImportIndex` here for the next map to
    /// refer back to.
    pub import_types: PrimaryMap<ImportIndex, (String, TypeDef)>,
    /// A list of "flattened" imports that are used by this instance.
    ///
    /// This import map represents extracting imports, as necessary, from the
    /// general imported types by this component. The flattening here refers to
    /// extracting items from instances. Currently the flat imports are either a
    /// host function or a core wasm module.
    ///
    /// For example if `ImportIndex(0)` pointed to an instance then this import
    /// map represent extracting names from that map, for example extracting an
    /// exported module or an exported function.
    ///
    /// Each import item is keyed by a `RuntimeImportIndex` which is referred to
    /// by types below whenever something refers to an import. The value for
    /// each `RuntimeImportIndex` in this map is the `ImportIndex` for where
    /// this items comes from (which can be associated with a name above in the
    /// `import_types` array) as well as the list of export names if
    /// `ImportIndex` refers to an instance. The export names array represents
    /// recursively fetching names within an instance.
    //
    // TODO: this is probably a lot of `String` storage and may be something
    // that needs optimization in the future. For example instead of lots of
    // different `String` allocations this could instead be a pointer/length
    // into one large string allocation for the entire component. Alternatively
    // strings could otherwise be globally intern'd via some other mechanism to
    // avoid `Linker`-specific intern-ing plus intern-ing here. Unsure what the
    // best route is or whether such an optimization is even necessary here.
    pub imports: PrimaryMap<RuntimeImportIndex, (ImportIndex, Vec<String>)>,
    /// A list of this component's exports, indexed by either position or name.
    ///
    /// (The `IndexMap` preserves insertion order, which is what makes the
    /// positional access possible.)
    pub exports: IndexMap<String, Export>,
    /// Initializers that must be processed when instantiating this component.
    ///
    /// This list of initializers does not correspond directly to the component
    /// itself. The general goal with this is that the recursive nature of
    /// components is "flattened" with an array like this which is a linear
    /// sequence of instructions of how to instantiate a component. This will
    /// have instantiations, for example, in addition to entries which
    /// initialize `VMComponentContext` fields with previously instantiated
    /// instances.
    pub initializers: Vec<GlobalInitializer>,
    /// The number of runtime instances (maximum `RuntimeInstanceIndex`) created
    /// when instantiating this component.
    pub num_runtime_instances: u32,
    /// Same as `num_runtime_instances`, but for `RuntimeComponentInstanceIndex`
    /// instead.
    pub num_runtime_component_instances: u32,
    /// The number of runtime memories (maximum `RuntimeMemoryIndex`) needed to
    /// instantiate this component.
    ///
    /// Note that this many memories will be stored in the `VMComponentContext`
    /// and each memory is intended to be unique (e.g. the same memory isn't
    /// stored in two different locations).
    pub num_runtime_memories: u32,
    /// The number of runtime reallocs (maximum `RuntimeReallocIndex`) needed to
    /// instantiate this component.
    ///
    /// Note that this many function pointers will be stored in the
    /// `VMComponentContext`.
    pub num_runtime_reallocs: u32,
    /// Same as `num_runtime_reallocs`, but for post-return functions.
    pub num_runtime_post_returns: u32,
    /// The number of lowered host functions (maximum `LoweredIndex`) needed to
    /// instantiate this component.
    pub num_lowerings: u32,
    /// The number of modules that are required to be saved within an instance
    /// at runtime, or effectively the number of exported modules.
    pub num_runtime_modules: u32,
    /// The number of functions which "always trap" used to implement
    /// `canon.lower` of `canon.lift`'d functions within the same component.
    pub num_always_trap: u32,
    /// The number of host transcoder functions needed for strings in adapter
    /// modules.
    pub num_transcoders: u32,
}
/// GlobalInitializer instructions to get processed when instantiating a component
///
/// The variants of this enum are processed during the instantiation phase of
/// a component in-order from front-to-back. These are otherwise emitted as a
/// component is parsed and read and translated.
//
// FIXME(#2639) if processing this list is ever a bottleneck we could
// theoretically use cranelift to compile an initialization function which
// performs all of these duties for us and skips the overhead of interpreting
// all of these instructions.
#[derive(Debug, Serialize, Deserialize)]
pub enum GlobalInitializer {
    /// A core wasm module is being instantiated.
    ///
    /// This will result in a new core wasm instance being created, which may
    /// involve running the `start` function of the instance as well if it's
    /// specified. This largely delegates to the same standard instantiation
    /// process as the rest of the core wasm machinery already uses.
    InstantiateModule(InstantiateModule),
    /// A host function is being lowered, creating a core wasm function.
    ///
    /// This initializer entry is intended to be used to fill out the
    /// `VMComponentContext` and information about this lowering such as the
    /// cranelift-compiled trampoline function pointer, the host function
    /// pointer the trampoline calls, and the canonical ABI options.
    LowerImport(LowerImport),
    /// A core wasm function was "generated" via `canon lower` of a function
    /// that was `canon lift`'d in the same component, meaning that the function
    /// always traps. This is recorded within the `VMComponentContext` as a new
    /// `VMCallerCheckedFuncRef` that's available for use.
    AlwaysTrap(AlwaysTrap),
    /// A core wasm linear memory is going to be saved into the
    /// `VMComponentContext`.
    ///
    /// This instruction indicates that the `index`th core wasm linear memory
    /// needs to be extracted from the `export` specified, a pointer to a
    /// previously created module instance, and stored into the
    /// `VMComponentContext` at the `index` specified. This lowering is then
    /// used in the future by pointers from `CanonicalOptions`.
    ExtractMemory(ExtractMemory),
    /// Same as `ExtractMemory`, except it's extracting a function pointer to be
    /// used as a `realloc` function.
    ExtractRealloc(ExtractRealloc),
    /// Same as `ExtractMemory`, except it's extracting a function pointer to be
    /// used as a `post-return` function.
    ExtractPostReturn(ExtractPostReturn),
    /// The `module` specified is saved into the runtime state at the next
    /// `RuntimeModuleIndex`, referred to later by `Export` definitions.
    SaveStaticModule(StaticModuleIndex),
    /// Same as `SaveStaticModule`, but for imported (rather than
    /// statically-known) modules.
    SaveModuleImport(RuntimeImportIndex),
    /// Similar to `ExtractMemory` and friends and indicates that a
    /// `VMCallerCheckedFuncRef` needs to be initialized for a transcoder
    /// function and this will later be used to instantiate an adapter module.
    Transcoder(Transcoder),
}
/// Metadata for extraction of a memory of what's being extracted and where it's
/// going.
///
/// Payload of [`GlobalInitializer::ExtractMemory`].
#[derive(Debug, Serialize, Deserialize)]
pub struct ExtractMemory {
    /// The index of the memory being defined.
    pub index: RuntimeMemoryIndex,
    /// Where this memory is being extracted from.
    pub export: CoreExport<MemoryIndex>,
}
/// Same as `ExtractMemory` but for the `realloc` canonical option.
///
/// Payload of [`GlobalInitializer::ExtractRealloc`].
#[derive(Debug, Serialize, Deserialize)]
pub struct ExtractRealloc {
    /// The index of the realloc being defined.
    pub index: RuntimeReallocIndex,
    /// Where this realloc is being extracted from.
    pub def: CoreDef,
}
/// Same as `ExtractMemory` but for the `post-return` canonical option.
///
/// Payload of [`GlobalInitializer::ExtractPostReturn`].
#[derive(Debug, Serialize, Deserialize)]
pub struct ExtractPostReturn {
    /// The index of the post-return being defined.
    pub index: RuntimePostReturnIndex,
    /// Where this post-return is being extracted from.
    pub def: CoreDef,
}
/// Different methods of instantiating a core wasm module.
#[derive(Debug, Serialize, Deserialize)]
pub enum InstantiateModule {
    /// A module defined within this component is being instantiated.
    ///
    /// Note that this is distinct from the case of imported modules because the
    /// order of imports required is statically known and can be pre-calculated
    /// to avoid string lookups related to names at runtime, represented by the
    /// flat list of arguments here.
    Static(StaticModuleIndex, Box<[CoreDef]>),
    /// An imported module is being instantiated.
    ///
    /// This is similar to `Static` but notably the imports are provided as a
    /// two-level named map (module name, then field name) since import
    /// resolution order needs to happen at runtime.
    Import(
        RuntimeImportIndex,
        IndexMap<String, IndexMap<String, CoreDef>>,
    ),
}
/// Description of a lowered import used in conjunction with
/// `GlobalInitializer::LowerImport`.
#[derive(Debug, Serialize, Deserialize)]
pub struct LowerImport {
    /// The index of the lowered function that's being created.
    ///
    /// This is guaranteed to be the `n`th `LowerImport` instruction
    /// if the index is `n`.
    pub index: LoweredIndex,
    /// The index of the imported host function that is being lowered.
    ///
    /// It's guaranteed that this `RuntimeImportIndex` points to a function.
    pub import: RuntimeImportIndex,
    /// The core wasm signature of the function that's being created.
    pub canonical_abi: SignatureIndex,
    /// The canonical ABI options used when lowering this function specified in
    /// the original component.
    pub options: CanonicalOptions,
}
/// Description of what to initialize when a `GlobalInitializer::AlwaysTrap` is
/// encountered.
#[derive(Debug, Serialize, Deserialize)]
pub struct AlwaysTrap {
    /// The index of the function that is being initialized in the
    /// `VMComponentContext`.
    pub index: RuntimeAlwaysTrapIndex,
    /// The core wasm signature of the function that's inserted.
    pub canonical_abi: SignatureIndex,
}
/// Definition of a core wasm item and where it can come from within a
/// component.
///
/// Note that this is sort of a result of data-flow-like analysis on a component
/// during compile time of the component itself. References to core wasm items
/// are "compiled" to either referring to a previous instance or to some sort of
/// lowered host import.
///
/// (Derives `Hash`/`Eq` so definitions can be used as map keys.)
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq)]
pub enum CoreDef {
    /// This item refers to an export of a previously instantiated core wasm
    /// instance.
    Export(CoreExport<EntityIndex>),
    /// This item is a core wasm function with the index specified here. Note
    /// that this `LoweredIndex` corresponds to the nth
    /// `GlobalInitializer::LowerImport` instruction.
    Lowered(LoweredIndex),
    /// This is used to represent a degenerate case of where a `canon lift`'d
    /// function is immediately `canon lower`'d in the same instance. Such a
    /// function always traps at runtime.
    AlwaysTrap(RuntimeAlwaysTrapIndex),
    /// This is a reference to a wasm global which represents the
    /// runtime-managed flags for a wasm instance.
    InstanceFlags(RuntimeComponentInstanceIndex),
    /// This refers to a cranelift-generated trampoline which calls to a
    /// host-defined transcoding function.
    Transcoder(RuntimeTranscoderIndex),
}
impl<T> From<CoreExport<T>> for CoreDef
where
EntityIndex: From<T>,
{
fn from(export: CoreExport<T>) -> CoreDef {
CoreDef::Export(export.map_index(|i| i.into()))
}
}
/// Identifier of an exported item from a core WebAssembly module instance.
///
/// Note that the `T` here is the index type for exports which can be
/// identified by index. The `T` is monomorphized with types like
/// [`EntityIndex`] or [`FuncIndex`].
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq)]
pub struct CoreExport<T> {
    /// The instance that this item is located within.
    ///
    /// Note that this is intended to index the `instances` map within a
    /// component. It's validated ahead of time that all instance pointers
    /// refer only to previously-created instances.
    pub instance: RuntimeInstanceIndex,
    /// The item that this export is referencing, either by name or by index.
    pub item: ExportItem<T>,
}
impl<T> CoreExport<T> {
    /// Maps the index type `T` to another type `U` if this export item indeed
    /// refers to an index `T`; a by-name item is passed through unchanged.
    pub fn map_index<U>(self, f: impl FnOnce(T) -> U) -> CoreExport<U> {
        let item = match self.item {
            ExportItem::Index(idx) => ExportItem::Index(f(idx)),
            ExportItem::Name(name) => ExportItem::Name(name),
        };
        CoreExport {
            instance: self.instance,
            item,
        }
    }
}
/// An index at which to find an item within a runtime instance.
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq)]
pub enum ExportItem<T> {
    /// An exact index that the target can be found at.
    ///
    /// This is used where possible to avoid name lookups at runtime during the
    /// instantiation process. This can only be used on instances where the
    /// module was statically known at compile time, however.
    Index(T),
    /// An item which is identified by a name, so at runtime we need to
    /// perform a name lookup to determine the index that the item is located
    /// at.
    ///
    /// This is used for instantiations of imported modules, for example, since
    /// the precise shape of the module is not known.
    Name(String),
}
/// Possible exports from a component.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Export {
    /// A lifted function being exported which is an adaptation of a core wasm
    /// function.
    LiftedFunction {
        /// The component function type of the function being created.
        ty: TypeFuncIndex,
        /// Which core WebAssembly export is being lifted.
        func: CoreDef,
        /// Any options, if present, associated with this lifting.
        options: CanonicalOptions,
    },
    /// A module defined within this component is exported.
    ///
    /// The module index here indexes a module recorded with
    /// `GlobalInitializer::SaveStaticModule` or
    /// `GlobalInitializer::SaveModuleImport` above.
    Module(RuntimeModuleIndex),
    /// A nested instance is being exported which has recursively defined
    /// `Export` items.
    Instance(IndexMap<String, Export>),
    /// An exported type from a component or instance, currently only
    /// informational.
    Type(TypeDef),
}
/// Canonical ABI options associated with a lifted or lowered function.
///
/// The optional indices below refer to slots in the `VMComponentContext`
/// whose counts are recorded in `Component::num_runtime_*`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CanonicalOptions {
    /// The component instance that this bundle was associated with.
    pub instance: RuntimeComponentInstanceIndex,
    /// The encoding used for strings.
    pub string_encoding: StringEncoding,
    /// The memory used by these options, if specified.
    pub memory: Option<RuntimeMemoryIndex>,
    /// The realloc function used by these options, if specified.
    pub realloc: Option<RuntimeReallocIndex>,
    /// The post-return function used by these options, if specified.
    pub post_return: Option<RuntimePostReturnIndex>,
}
/// Possible encodings of strings within the component model.
//
// Note that the `repr(u8)` is load-bearing here since this is used in an
// `extern "C" fn()` function argument which is called from cranelift-compiled
// code so we must know the representation of this.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
#[repr(u8)]
pub enum StringEncoding {
    /// UTF-8.
    Utf8,
    /// UTF-16.
    Utf16,
    /// "Compact" UTF-16 (latin1-or-utf16 per the canonical ABI).
    CompactUtf16,
}
/// Information about a string transcoding function required by an adapter
/// module.
///
/// A transcoder is used when strings are passed between adapter modules,
/// optionally changing string encodings at the same time. The transcoder is
/// implemented in a few different layers:
///
/// * Each generated adapter module has some glue around invoking the transcoder
///   represented by this item. This involves bounds-checks and handling
///   `realloc` for example.
/// * Each transcoder gets a cranelift-generated trampoline which has the
///   appropriate signature for the adapter module in question. Existence of
///   this initializer indicates that this should be compiled by Cranelift.
/// * The cranelift-generated trampoline will invoke a "transcoder libcall"
///   which is implemented natively in Rust that has a signature independent of
///   memory64 configuration options for example.
#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq)]
pub struct Transcoder {
    /// The index of the transcoder being defined and initialized.
    ///
    /// This indicates which `VMCallerCheckedFuncRef` slot is written to in a
    /// `VMComponentContext`.
    pub index: RuntimeTranscoderIndex,
    /// The transcoding operation being performed.
    pub op: Transcode,
    /// The linear memory that the string is being read from.
    pub from: RuntimeMemoryIndex,
    /// Whether or not the source linear memory is 64-bit or not.
    pub from64: bool,
    /// The linear memory that the string is being written to.
    pub to: RuntimeMemoryIndex,
    /// Whether or not the destination linear memory is 64-bit or not.
    pub to64: bool,
    /// The wasm signature of the cranelift-generated trampoline.
    pub signature: SignatureIndex,
}
pub use crate::fact::{FixedEncoding, Transcode};
| true |
82235e797bc8959dc6a5567ff9fb4cd3281b4b4b
|
Rust
|
bertptrs/adventofcode
|
/2022/src/day19.rs
|
UTF-8
| 8,485 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use anyhow::Result;
use nom::bytes::complete::tag;
use nom::character::complete::multispace1;
use nom::character::streaming::alpha1;
use nom::combinator::map_res;
use nom::combinator::opt;
use nom::multi::many1;
use nom::sequence::delimited;
use nom::sequence::preceded;
use nom::sequence::separated_pair;
use nom::sequence::terminated;
use nom::sequence::tuple;
use nom::IResult;
use crate::common::parse_input;
/// The four resource kinds in the puzzle.
///
/// `repr(usize)` so a variant doubles as an index into the per-robot cost
/// arrays (`BluePrint::costs`) and the resource/machine arrays.
#[repr(usize)]
#[derive(Clone, Copy)]
enum Mineral {
    Ore,
    Clay,
    Obsidian,
    Geode,
}
impl TryFrom<&'_ [u8]> for Mineral {
    type Error = String;

    /// Parses a mineral name from raw input bytes (e.g. `b"ore"`), returning
    /// a descriptive error for anything unrecognized.
    fn try_from(value: &'_ [u8]) -> std::result::Result<Self, Self::Error> {
        let mineral = match value {
            b"ore" => Self::Ore,
            b"clay" => Self::Clay,
            b"obsidian" => Self::Obsidian,
            b"geode" => Self::Geode,
            unknown => {
                return Err(format!(
                    "Invalid mineral '{}'",
                    String::from_utf8_lossy(unknown)
                ))
            }
        };
        Ok(mineral)
    }
}
/// One parsed blueprint from the puzzle input.
#[derive(Debug)]
struct BluePrint {
    /// Blueprint number from the "Blueprint N:" header.
    id: u32,
    /// `costs[robot kind][resource kind]` = units required, both axes indexed
    /// by `Mineral as usize` (geodes never appear as a cost, hence width 3).
    costs: [[u8; 3]; 4],
}
impl BluePrint {
    /// Maximum number of geodes this blueprint can open within `time`
    /// minutes, via best-first branch-and-bound over "which robot to build
    /// next" decisions.
    pub fn max_geodes(&self, time: u8) -> u8 {
        /// How much would we produce if all we did was produce geode robots for the remaining time
        fn ideal(remaining: u32) -> u32 {
            if remaining <= 1 {
                0
            } else {
                // 1 + 2 + ... + (remaining - 1): a robot finished with t
                // minutes left contributes t - 1 geodes.
                (remaining - 1) * remaining / 2
            }
        }
        /// One search node: the moment just after deciding to wait for and
        /// build a particular robot.
        #[derive(Eq, PartialEq)]
        struct State {
            missed: u32,        // geodes forgone relative to the `ideal` upper bound
            got: u8,            // geodes already guaranteed by built geode robots
            time_left: u8,
            resources: [u8; 3], // ore/clay/obsidian on hand
            machines: [u8; 3],  // ore/clay/obsidian robots owned
        }
        // Heap ordering: explore states that missed fewer geodes first,
        // breaking ties on more geodes got, more time left, more machines.
        impl Ord for State {
            fn cmp(&self, other: &Self) -> Ordering {
                Ordering::Equal
                    .then(other.missed.cmp(&self.missed))
                    .then(self.got.cmp(&other.got))
                    .then(self.time_left.cmp(&other.time_left))
                    .then(self.machines.cmp(&other.machines))
            }
        }
        impl PartialOrd for State {
            fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
                Some(self.cmp(other))
            }
        }
        let max_needed = self.max_needed();
        let mut todo = BinaryHeap::new();
        let mut best = 0;
        // Start with a single ore robot and nothing banked.
        todo.push(State {
            missed: 0,
            got: 0,
            time_left: time,
            resources: [0; 3],
            machines: [1, 0, 0],
        });
        while let Some(State {
            missed,
            got,
            time_left,
            resources,
            machines,
        }) = todo.pop()
        {
            let ideal_from_now = ideal(u32::from(time_left));
            // Need to check again because we might've gotten a better result in the meantime.
            // (`best >= got` holds because `best` is raised before any push.)
            if u32::from(best - got) >= ideal_from_now {
                continue;
            }
            // Safety valve against a pathological frontier blow-up.
            assert!(
                todo.len() <= 1_000_000,
                "Safety: got a todo list of len {}, best: {best}",
                todo.len()
            );
            // Branch on which robot we build next.
            for (element, &costs) in self.costs.iter().enumerate() {
                // NOTE(review): `break` (not `continue`) assumes that if this
                // robot can never be built, every later robot (which depends
                // on its output down the ore→clay→obsidian chain) can't be
                // built either — confirm against the recipe ordering.
                let Some(min_to_build) = self.until_buildable(costs, resources, machines) else { break };
                // +1 because we need a turn to build
                let built_after = min_to_build + 1;
                if built_after >= time_left {
                    continue;
                }
                // Ideally, would be written as a nice `array::from_fn`. It turns out that codegen
                // for `array::from_fn` is very bad, and writing it out into this for loop reduces
                // time taken by approximately 100%.
                let mut resources_after = [0; 3];
                for i in 0..3 {
                    resources_after[i] = resources[i] + machines[i] * built_after - costs[i];
                }
                let time_after = time_left - built_after;
                if element == Mineral::Geode as usize {
                    // A geode robot yields one geode per remaining minute, so
                    // its total output is credited up front.
                    let new_got = got + time_after;
                    best = best.max(new_got);
                    if u32::from(best - new_got) >= ideal(time_after.into()) {
                        continue;
                    }
                    todo.push(State {
                        missed,
                        got: new_got,
                        time_left: time_after,
                        resources: resources_after,
                        machines,
                    });
                    best = best.max(new_got);
                } else {
                    // Prune: never build more robots of a kind than the most
                    // expensive recipe consumes per minute, and skip branches
                    // whose ideal future can't beat `best`.
                    if machines[element] >= max_needed[element]
                        || u32::from(best - got) >= ideal(time_after.into())
                    {
                        continue;
                    }
                    let mut new_machines = machines;
                    new_machines[element] += 1;
                    // Waiting costs us the geodes an always-building-geodes
                    // strategy would have accumulated meanwhile.
                    let new_missed = ideal_from_now - ideal(u32::from(time_after));
                    todo.push(State {
                        missed: new_missed,
                        got,
                        time_left: time_after,
                        resources: resources_after,
                        machines: new_machines,
                    })
                }
            }
        }
        best
    }
    /// Minutes until `costs` can be paid given current `resources` and
    /// per-minute `machines` income; `None` if some required resource has no
    /// producing machine (the robot can never be built from this state).
    #[inline]
    fn until_buildable(&self, costs: [u8; 3], resources: [u8; 3], machines: [u8; 3]) -> Option<u8> {
        let mut min_to_build = 0;
        for ((&cost, &avail), &machine) in costs.iter().zip(&resources).zip(&machines) {
            if cost > avail {
                if machine == 0 {
                    return None;
                } else {
                    // Ceiling division: minutes for income to cover the deficit.
                    min_to_build = min_to_build.max((cost - avail + machine - 1) / machine);
                }
            }
        }
        Some(min_to_build)
    }
    /// Per-resource maximum any single recipe consumes; building more robots
    /// of a kind than this can never help (used for pruning above).
    fn max_needed(&self) -> [u8; 3] {
        let mut max_needed = [0; 3];
        for cost in &self.costs {
            for (max, &new) in max_needed.iter_mut().zip(cost) {
                *max = (*max).max(new);
            }
        }
        max_needed
    }
}
/// Parses one `Blueprint N: ...` paragraph into a `BluePrint`.
///
/// Expects the header followed by exactly four
/// `Each <mineral> robot costs <n> <mineral>[ and <n> <mineral>].` entries;
/// each parsed cost is stored keyed by robot kind and resource kind.
fn parse_blueprint(input: &[u8]) -> IResult<&[u8], BluePrint> {
    use nom::character::complete::u32;
    // An alphabetic word mapped through `Mineral::try_from`.
    fn parse_mineral(input: &[u8]) -> IResult<&[u8], Mineral> {
        map_res(alpha1, Mineral::try_from)(input)
    }
    // "<amount> <mineral>", e.g. "3 ore".
    fn parse_cost(input: &[u8]) -> IResult<&[u8], (u8, Mineral)> {
        separated_pair(nom::character::complete::u8, tag(" "), parse_mineral)(input)
    }
    let (mut input, id) =
        terminated(delimited(tag("Blueprint "), u32, tag(":")), multispace1)(input)?;
    let mut costs: [[u8; 3]; 4] = Default::default();
    // One robot description: robot kind, first cost, optional " and <cost>".
    let mut parse_robot = terminated(
        tuple((
            preceded(tag("Each "), parse_mineral),
            preceded(tag(" robot costs "), parse_cost),
            terminated(opt(preceded(tag(" and "), parse_cost)), tag(".")),
        )),
        multispace1,
    );
    for _ in 0..4 {
        let (remaining, (element, (amount1, req1), cost2)) = parse_robot(input)?;
        input = remaining;
        costs[element as usize][req1 as usize] = amount1;
        if let Some((amount2, req2)) = cost2 {
            costs[element as usize][req2 as usize] = amount2;
        }
    }
    Ok((input, BluePrint { id, costs }))
}
/// Part 1: sum of quality levels (`id * geodes openable in 24 minutes`)
/// over every blueprint in the input.
pub fn part1(input: &[u8]) -> Result<String> {
    let blueprints = parse_input(input, many1(parse_blueprint))?;
    let mut quality_sum: u32 = 0;
    for bp in &blueprints {
        quality_sum += bp.id * u32::from(bp.max_geodes(24));
    }
    Ok(quality_sum.to_string())
}
/// Part 2: product of the geode counts (32 minutes) for the first three
/// blueprints only.
pub fn part2(input: &[u8]) -> Result<String> {
    let blueprints = parse_input(input, many1(parse_blueprint))?;
    let mut product: u32 = 1;
    for bp in blueprints.iter().take(3) {
        product *= u32::from(bp.max_geodes(32));
    }
    Ok(product.to_string())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Puzzle sample input, bundled at compile time.
    const SAMPLE: &[u8] = include_bytes!("./samples/19.txt");
    // Parses the bundled sample into blueprints, panicking on bad input.
    fn get_samples() -> Vec<BluePrint> {
        parse_input(SAMPLE, many1(parse_blueprint)).unwrap()
    }
    #[test]
    fn sample_part1() {
        let samples = get_samples();
        // Expected 24-minute geode counts for the two sample blueprints.
        assert_eq!(samples[0].max_geodes(24), 9);
        assert_eq!(samples[1].max_geodes(24), 12);
        assert_eq!(part1(SAMPLE).unwrap(), "33");
    }
    #[test]
    fn sample_part2() {
        let samples = get_samples();
        // Expected 32-minute geode counts from the puzzle description.
        assert_eq!(samples[0].max_geodes(32), 56);
        assert_eq!(samples[1].max_geodes(32), 62);
    }
}
| true |
9b8536c4c4219cac949bcbf42310fb2fafbaf77e
|
Rust
|
cseidman/transaction_processor
|
/src/sqlops.rs
|
UTF-8
| 11,285 | 2.84375 | 3 |
[] |
no_license
|
use rusqlite::*;
use std::process ;
use std::fs::File;
use std::io::Read;
use crate::processor::{Trade, Client, Dispute};
/// Opens (creating it if absent) the on-disk SQLite database `transact_db`.
///
/// Panics if the connection cannot be established.
pub fn get_connection() -> Connection {
    Connection::open("transact_db").expect("Unable to establish SQLite connection")
}
/// Reads the entire contents of `fname` into a `String`.
///
/// Returns `None` (after logging to stderr) if the file cannot be opened or
/// read — e.g. missing file, permission error, or non-UTF-8 contents. The
/// previous version panicked on read/decode failures; errors are now handled
/// uniformly.
pub fn read_file(fname: &str) -> Option<String> {
    match std::fs::read_to_string(fname) {
        Ok(text) => Some(text),
        Err(error) => {
            eprintln!("Error opening file {}: {}", fname, error);
            None
        }
    }
}
// Rebuilds all the objects in the database from scratch
//
// Reads sql/transact.sql and executes it as a single batch against the
// default connection. Panics if the SQL file can't be read; exits the whole
// process with status 1 if the batch fails.
pub fn build_database() {
    let conn = get_connection();
    let sql = read_file("sql/transact.sql");
    if sql.is_none() {
        eprintln!("Unable to parse sql file");
        panic!();
    }
    let res = conn.execute_batch(sql.unwrap().as_str());
    if res.is_err() {
        eprintln!("SQL build failed");
        // Fatal: the rest of the program can't run without the schema.
        process::exit(1);
    } else {
        eprintln!("SQL build succeeded");
    }
}
/// Looks up a single trade event (`withdrawal` or `deposit`) by its event id.
///
/// Returns `None` when no matching row exists. Panics if the statement cannot
/// be prepared/executed or if the matching row fails to deserialize.
pub fn get_trade_transaction(conn: &Transaction, transaction_id: u32) -> Option<Trade> {
    let mut pstmt = conn.prepare("select * from trade_event where event_type in ('withdrawal','deposit') and event_id = ?").unwrap();
    let mut rows = pstmt
        .query_map([transaction_id], |row| {
            // Column order inferred from this mapping: 0 = event id,
            // 1 = client id, 2 = event type, 3 = amount — TODO confirm
            // against sql/transact.sql.
            Ok(Trade {
                Transaction_type: row.get(2)?,
                Client_id: row.get(1)?,
                Transaction_id: row.get(0)?,
                // Use `?` like the other columns (previously `.unwrap()`), so
                // a bad amount surfaces as a rusqlite error instead of a
                // panic inside the row-mapping closure.
                Amount: row.get(3)?,
            })
        })
        .unwrap();
    // event_id identifies at most one trade, so only the first row matters;
    // this also avoids collecting into a Vec and cloning its first element.
    rows.next().map(|t| t.unwrap())
}
/// Returns every row of `client_account` as a `Client`, in table order.
///
/// Panics if the query cannot be prepared/executed or a row fails to map.
pub fn get_output(conn: &Connection) -> Vec<Client> {
    let mut stmt = conn.prepare("select * from client_account").unwrap();
    let rows = stmt
        .query_map([], |row| {
            Ok(Client {
                client_id: row.get(0)?,
                available: row.get(1)?,
                total: row.get(2)?,
                held: row.get(3)?,
                locked: row.get(4)?,
            })
        })
        .unwrap();
    rows.map(|r| r.unwrap()).collect()
}
/// Fetches the account row for `client_id`, or `None` if the client has no
/// account yet. Panics on SQL or row-mapping errors.
pub fn get_account(conn: &Connection, client_id: u16) -> Option<Client> {
    let mut pstmt = conn.prepare("select * from client_account where client_id = ?").unwrap();
    let trn = pstmt.query_map([client_id], |row| {
        Ok(Client {
            client_id: row.get(0)?,
            available: row.get(1)?,
            total: row.get(2)?,
            held: row.get(3)?,
            locked: row.get(4)?
        })
    }).unwrap();
    // Materialize the cursor; client_id is the key, so 0 or 1 rows expected.
    let mut vcli: Vec<Client> = Vec::new();
    for t in trn {
        vcli.push(t.unwrap());
    }
    if vcli.len() == 0 {
        return None;
    }
    // NOTE: moving out by index relies on Client implementing Copy.
    Some(vcli[0])
}
/**
Gets open disputes only - no resolved or charged back ones.

Looks for a `dispute` row keyed by (client_id, event_id) whose status is
still `'disputed'`; returns `None` when the dispute was never opened or has
already been resolved/charged back. Panics on SQL errors.
*/
pub fn get_dispute(conn: &Transaction, client_id: u16, transaction_id: u32 ) -> Option<Dispute>{
    let mut pstmt = conn.prepare("select * from dispute where client_id = ? and event_id = ? and status = 'disputed'").unwrap();
    let trn = pstmt.query_map(params![client_id, transaction_id], |row| {
        Ok(Dispute {
            client_id: row.get(0)?,
            event_id: row.get(1)?,
            amount: row.get(2)?,
            status: row.get(3)?
        })
    }).expect("Unable to retrieve open dispute data");
    // Collect all matches; the (client_id, event_id, status) filter should
    // yield at most one row.
    let mut vdisp: Vec<Dispute> = Vec::new();
    for t in trn {
        vdisp.push(t.unwrap());
    }
    if vdisp.len() == 0 {
        return None;
    }
    Some(vdisp[0].clone())
}
/// Applies one input record to the ledger inside a single SQL transaction.
///
/// * `deposit` / `withdrawal` adjust `client_account` and append a row to
///   `trade_event`.
/// * `dispute` / `resolve` / `chargeback` manage the `dispute` table and the
///   held/total/available columns against a previously recorded withdrawal.
///
/// Records against a locked account are skipped entirely. Panics if the SQL
/// transaction cannot be started or committed, or if an update fails.
pub fn insert_transaction(conn: &mut Connection, t: &Trade) {
    let opt_acct = get_account(conn, t.Client_id);
    // If the account is blocked, then nothing can be done with it - just go on to the next one
    if opt_acct.is_some() && opt_acct.unwrap().locked {
        eprintln!("Client account {} is locked. We won't process transaction {}", t.Client_id, t.Transaction_id);
        return;
    }
    let trx = conn.transaction().unwrap();
    // First update the account:
    // If there is no client .. we open an account using this transaction (if it's a deposit)
    if opt_acct.is_none() {
        // open the account with the first deposit
        if t.Transaction_type == "deposit" {
            trx.execute("insert into client_account (client_id, available, total, held, locked) values (?1, ?2, ?3, ?4, ?5)", params![t.Client_id, t.Amount, t.Amount, 0, false])
                .expect("Failed to open new account");
        } else {
            // This shouldn't happen .. but let's warn anyway since we can't open an account with anything other than a deposit
            // in that case, we discard the transaction
            eprintln!("Account # {} not open to process {} in transaction # {}", t.Client_id, t.Transaction_type, t.Transaction_id);
        }
    } else {
        // If we got here, we know the account has a value
        let acct = opt_acct.unwrap();
        match t.Transaction_type.as_str() {
            "deposit" => {
                // Credit both available and total by the deposited amount.
                let available = acct.available + t.Amount.unwrap();
                let total = acct.total + t.Amount.unwrap();
                trx.execute("update client_account set available =?1, total= ?2 where client_id = ?3", params![available, total, t.Client_id]).expect("Failed to update account");
            },
            "withdrawal" => {
                // Debit only when funds suffice; an overdraft attempt is
                // silently ignored (account left unchanged).
                let available = acct.available - t.Amount.unwrap();
                if available >= 0.0 {
                    let total = acct.total - t.Amount.unwrap();
                    trx.execute("update client_account set available =?1, total= ?2 where client_id =?3", params![available, total, t.Client_id]).expect("Error updating account");
                }
            },
            "dispute" => {
                let t_original = get_trade_transaction(&trx, t.Transaction_id);
                // Did we find the transaction we're disputing? If so, we proceed to handle the dispute
                if t_original.is_some() {
                    let t_orig = t_original.unwrap();
                    // Assumption: Client is only going to dispute withdrawals. The amount would have
                    // already been debited from the balance, so there's no need to subtract it again.
                    // We do need to update the held amount (and by extension the total)
                    if t_orig.Transaction_type == String::from("withdrawal") {
                        let held = acct.held + t_orig.Amount.unwrap();
                        let total = acct.total + t_orig.Amount.unwrap();
                        trx.execute("update client_account set held =?1, total = ?2 where client_id =?3;", params![held, total, t.Client_id]).expect("Error updating account");
                        // Record the open dispute so resolve/chargeback can find it.
                        trx.execute("insert into dispute values (?1, ?2, ?3, ?4);", params![t.Client_id, t.Transaction_id, t_orig.Amount, "disputed"]).expect("Error updating dispute");
                    }
                } else {
                    // This shouldn't happen very often, but it's not a fatal error, so we just ignore this transaction
                    eprintln!("Cannot find transaction {} to dispute", t.Transaction_id);
                }
            },
            "resolve" => {
                let t_original = get_trade_transaction(&trx, t.Transaction_id);
                // Again we can resolve this transaction only if there is a corresponding transaction to resolve
                if t_original.is_some() {
                    let t_orig = t_original.unwrap();
                    // Make sure there's an existing dispute to resolve
                    let dispute = get_dispute(&trx, t_orig.Client_id, t_orig.Transaction_id);
                    // Rule: if there is no pending dispute, then ignore this record
                    if dispute.is_none() {
                        // NOTE(review): returning here drops `trx` without
                        // commit; nothing has been written in this arm yet,
                        // so the implicit rollback is a no-op.
                        return;
                    }
                    // The issue has been resolved in favor of the trading firm, so we simply let
                    // the withdrawal amount stand and we reduce the hold amount as well as the total
                    if t_orig.Transaction_type == String::from("withdrawal") {
                        let held = acct.held - t_orig.Amount.unwrap();
                        let total = acct.total - t_orig.Amount.unwrap();
                        trx.execute("update client_account set held = ?1, total = ?2 where client_id =?3",
                            params![held, total, t.Client_id]).expect("Error resolving dispute");
                        trx.execute("update dispute set status='resolved' where client_id = ?1 and event_id = ?2",
                            params![t.Client_id, t.Transaction_id]).expect("Unable to update dispute");
                    }
                } else {
                    eprintln!("Cannot find transaction {} nor the dispute to resolve", t.Transaction_id);
                }
            },
            "chargeback" => {
                let t_original = get_trade_transaction(&trx, t.Transaction_id);
                if t_original.is_some() {
                    let t_orig = t_original.unwrap();
                    // Make sure there's an issue to resolve
                    let dispute = get_dispute(&trx, t_orig.Client_id, t_orig.Transaction_id);
                    // Rule: if there is no pending dispute, then ignore this record
                    if dispute.is_none() {
                        // NOTE(review): same as "resolve" — dropping `trx`
                        // here rolls back nothing, as no writes happened yet.
                        return;
                    }
                    // The issue has been resolved in favor of the client and so we credit the
                    // customer and lock the account
                    if t_orig.Transaction_type.as_str() == "withdrawal" {
                        let held = acct.held - t_orig.Amount.unwrap();
                        let available = acct.available + t_orig.Amount.unwrap();
                        trx.execute("update client_account set held = ?1, available = ?2, locked = 1 where client_id =?3",
                            params![held, available, t.Client_id]).expect("Error resolving dispute");
                        trx.execute("update dispute set status='chargedback' where client_id = ?1 and event_id = ?2",
                            params![t.Client_id, t.Transaction_id]).expect("Unable to update dispute status");
                    }
                }
            },
            _ => panic!("Unknown transaction type .. (this should be impossible!)")
        }
    }
    // Add the transaction for deposits and withdrawals only - the other events are used to manage a dispute table
    if t.Transaction_type.as_str() == "deposit" || t.Transaction_type.as_str() == "withdrawal" {
        let res = trx.execute("insert into trade_event (event_id, client_id, event_type, amount) values (?1, ?2, ?3, ?4)",
            params![t.Transaction_id, t.Client_id, t.Transaction_type, t.Amount]);
        if res.is_err() {
            eprintln!("Failed to record transaction # {}", t.Transaction_id);
        }
    }
    trx.commit().expect("Commit failed");
}
| true |
8c302a404a203b766ffa7653d0fe830f5ed1af95
|
Rust
|
BenViridian/trashy_bot
|
/src/commands/lastfm.rs
|
UTF-8
| 9,276 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use crate::models::lastfm::Lastfm;
use crate::util::get_client;
use crate::LASTFM_API_KEY;
use serenity::prelude::*;
use serenity::{
framework::standard::{macros::command, Args, CommandResult},
model::channel::Message,
};
use tracing::info;
#[command]
#[description = "Link your lastfm account to your discord account"]
#[example = "HansTrashy"]
#[usage = "*lastfmusername*"]
#[num_args(1)]
// Upserts the Discord-user -> last.fm-username mapping: updates the stored
// name when a row already exists, otherwise creates one; replies either way.
pub async fn register(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    let username = args.single::<String>()?;
    // An existing row means the user registered before -> update in place.
    if let Ok(user) = Lastfm::get(
        &mut *get_client(&ctx).await?,
        *msg.author.id.as_u64() as i64,
    )
    .await
    {
        let lastfm = Lastfm::update(&mut *get_client(&ctx).await?, user.id, username).await?;
        msg.reply(
            ctx,
            format!("Updated your lastfm username to {}", lastfm.username),
        )
        .await?;
    } else {
        // No row yet -> first registration for this Discord id.
        let lastfm = Lastfm::create(
            &mut *get_client(&ctx).await?,
            *msg.author.id.as_u64() as i64,
            username,
        )
        .await?;
        msg.reply(
            ctx,
            format!("added {} as your lastfm username!", lastfm.username),
        )
        .await?;
    }
    Ok(())
}
#[command]
#[description = "Show your currently playing track"]
#[num_args(0)]
#[bucket = "lastfm"]
// Queries last.fm's user.getrecenttracks and posts an embed for any track
// flagged as "now playing".
pub async fn now(ctx: &Context, msg: &Message, _args: Args) -> CommandResult {
    let lastfm = Lastfm::get(
        &mut *get_client(&ctx).await?,
        *msg.author.id.as_u64() as i64,
    )
    .await?;
    // prepare for the lastfm api
    let url = format!("http://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user={}&api_key={}&format=json",
        lastfm.username,
        *LASTFM_API_KEY);
    let res: serde_json::Value = reqwest::get(&url).await?.json().await?;
    // ignore the case where users only played a single title and there is no array
    if let Some(tracks) = res
        .pointer("/recenttracks/track")
        .and_then(|a| a.as_array())
    {
        for t in tracks {
            // here we have a boolean that only ever can be true, otherwise it is non existent, also, it is a string
            if t.pointer("/@attr/nowplaying")
                .and_then(|a| a.as_str())
                .unwrap_or("")
                == "true"
            {
                // Missing fields degrade to "Unknown ..." rather than erroring.
                let content = format!(
                    "Artist: {} - {}",
                    t.pointer("/artist/#text")
                        .and_then(|a| a.as_str())
                        .unwrap_or("Unknown Artist"),
                    t.pointer("/name")
                        .and_then(|a| a.as_str())
                        .unwrap_or("Unknown Title")
                );
                msg.channel_id
                    .send_message(&ctx, |m| m.embed(|e| e.description(&content)))
                    .await?;
            }
        }
    }
    Ok(())
}
#[command]
#[description = "Show your recent tracks"]
#[num_args(0)]
#[bucket = "lastfm"]
// Posts one embed listing the caller's 10 most recent tracks from last.fm.
pub async fn recent(ctx: &Context, msg: &Message, _args: Args) -> CommandResult {
    let lastfm = Lastfm::get(
        &mut *get_client(&ctx).await?,
        *msg.author.id.as_u64() as i64,
    )
    .await?;
    // prepare for the lastfm api
    let url = format!("http://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user={}&api_key={}&format=json&limit=10",
        lastfm.username,
        *LASTFM_API_KEY);
    let res: serde_json::Value = reqwest::get(&url).await?.json().await?;
    let mut content = String::new();
    // ignore the case where users only played a single title and there is no array
    if let Some(tracks) = res
        .pointer("/recenttracks/track")
        .and_then(|a| a.as_array())
    {
        for t in tracks {
            // One line per track; missing fields degrade to "Unknown ...".
            content.push_str(&format!(
                "Artist: {} - {}\n",
                t.pointer("/artist/#text")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Artist"),
                t.pointer("/name")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Title"),
            ));
        }
    }
    msg.channel_id
        .send_message(&ctx, |m| m.embed(|e| e.description(&content)))
        .await?;
    Ok(())
}
#[command]
#[description = "Show your top artists"]
#[usage = "(all|7d|1m|3m|6m|12m)"]
#[example = "3m"]
#[min_args(0)]
#[max_args(1)]
#[bucket = "lastfm"]
// Posts the caller's top-10 artists for the requested period
// (unrecognized input falls back to "overall").
pub async fn artists(ctx: &Context, msg: &Message, args: Args) -> CommandResult {
    // Map the user-facing period shorthand to last.fm's period values.
    let period = match args.rest() {
        "all" => "overall",
        "7d" => "7day",
        "1m" => "1month",
        "3m" => "3month",
        "6m" => "6month",
        "12m" => "12month",
        _ => "overall",
    };
    let lastfm = Lastfm::get(
        &mut *get_client(&ctx).await?,
        *msg.author.id.as_u64() as i64,
    )
    .await?;
    // prepare for the lastfm api
    let url = format!("http://ws.audioscrobbler.com/2.0/?method=user.gettopartists&user={}&api_key={}&format=json&limit=10&period={}",
        lastfm.username,
        *LASTFM_API_KEY,
        period);
    let res: serde_json::Value = reqwest::get(&url).await?.json().await?;
    let mut content = String::new();
    if let Some(artists) = res.pointer("/topartists/artist").and_then(|a| a.as_array()) {
        for a in artists {
            content.push_str(&format!(
                "Rank: {} | {}\n",
                a.pointer("/@attr/rank")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Rank"),
                a.pointer("/name")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Artist"),
            ));
        }
    }
    msg.channel_id
        .send_message(&ctx, |m| m.embed(|e| e.description(&content)))
        .await?;
    Ok(())
}
#[command]
#[description = "Show your top albums"]
#[usage = "(all|7d|1m|3m|6m|12m)"]
#[example = "3m"]
#[min_args(0)]
#[max_args(1)]
#[bucket = "lastfm"]
// Posts the caller's top-10 albums for the requested period; mirrors the
// structure of `artists` above.
pub async fn albums(ctx: &Context, msg: &Message, args: Args) -> CommandResult {
    // Map the user-facing period shorthand to last.fm's period values.
    let period = match args.rest() {
        "all" => "overall",
        "7d" => "7day",
        "1m" => "1month",
        "3m" => "3month",
        "6m" => "6month",
        "12m" => "12month",
        _ => "overall",
    };
    let lastfm = Lastfm::get(
        &mut *get_client(&ctx).await?,
        *msg.author.id.as_u64() as i64,
    )
    .await?;
    // prepare for the lastfm api
    let url = format!("http://ws.audioscrobbler.com/2.0/?method=user.gettopalbums&user={}&api_key={}&format=json&limit=10&period={}",
        lastfm.username,
        *LASTFM_API_KEY,
        period);
    let res: serde_json::Value = reqwest::get(&url).await?.json().await?;
    let mut content = String::new();
    if let Some(albums) = res.pointer("/topalbums/album").and_then(|a| a.as_array()) {
        for a in albums {
            content.push_str(&format!(
                "Rank: {} | {}\n",
                a.pointer("/@attr/rank")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Rank"),
                a.pointer("/name")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Artist"),
            ));
        }
    }
    msg.channel_id
        .send_message(&ctx, |m| m.embed(|e| e.description(&content)))
        .await?;
    Ok(())
}
#[command]
#[description = "Show your top tracks"]
#[usage = "(all|7d|1m|3m|6m|12m)"]
#[example = "3m"]
#[min_args(0)]
#[max_args(1)]
#[bucket = "lastfm"]
// Posts the caller's top-10 tracks (with play counts) for the requested
// period; mirrors the structure of `artists`/`albums` above.
pub async fn tracks(ctx: &Context, msg: &Message, args: Args) -> CommandResult {
    // Map the user-facing period shorthand to last.fm's period values.
    let period = match args.rest() {
        "all" => "overall",
        "7d" => "7day",
        "1m" => "1month",
        "3m" => "3month",
        "6m" => "6month",
        "12m" => "12month",
        _ => "overall",
    };
    info!("period: {:?}", period);
    let lastfm = Lastfm::get(
        &mut *get_client(&ctx).await?,
        *msg.author.id.as_u64() as i64,
    )
    .await?;
    // prepare for the lastfm api
    let url = format!("http://ws.audioscrobbler.com/2.0/?method=user.gettoptracks&user={}&api_key={}&format=json&limit=10&period={}",
        lastfm.username,
        *LASTFM_API_KEY,
        period);
    let res: serde_json::Value = reqwest::get(&url).await?.json().await?;
    let mut content = String::new();
    if let Some(tracks) = res.pointer("/toptracks/track").and_then(|a| a.as_array()) {
        for t in tracks {
            content.push_str(&format!(
                "Rank: {} | Played: {} | {} - {}\n",
                t.pointer("/@attr/rank")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Rank"),
                t.pointer("/playcount")
                    .and_then(|a| a.as_str())
                    .unwrap_or("-"),
                t.pointer("/artist/name")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Artist"),
                t.pointer("/name")
                    .and_then(|a| a.as_str())
                    .unwrap_or("Unknown Track"),
            ));
        }
    }
    msg.channel_id
        .send_message(&ctx, |m| m.embed(|e| e.description(&content)))
        .await?;
    Ok(())
}
| true |
e6469acff680c164da9c90a9b419cdbe8c030128
|
Rust
|
nakakura/study
|
/rust/rust_mutex_sample/src/main.rs
|
UTF-8
| 964 | 3.421875 | 3 |
[] |
no_license
|
use std::thread;
use std::time::Duration;
use std::sync::{Arc, Mutex};
/// Shared buffer: producer threads push bytes, the consumer pops them.
pub struct Container{
    // Arc shares ownership across threads; Mutex guards the Vec, which is
    // used LIFO (push/pop at the back).
    pub packet: Arc<Mutex<Vec<u8>>>,
}
/// Demo entry point: starts two producers (distinguished by their prefix)
/// and one consumer over the same shared buffer, then sleeps so the
/// detached threads have time to run before the process exits.
fn main() {
    println!("Hello, world!");
    let container: Container = Container{ packet: Arc::new(Mutex::new(vec!())) };
    generate(&container, 0);
    generate(&container, 100);
    display(&container);
    thread::sleep(Duration::from_millis(40000));
}
/// Spawns a detached producer thread that pushes `prefix + 1`,
/// `prefix + 2`, ... into the shared buffer, one value every 400 ms.
pub fn generate(container: &Container, prefix: u8) {
    let buffer = container.packet.clone();
    thread::spawn(move || {
        let mut step = 1;
        loop {
            thread::sleep(Duration::from_millis(400));
            buffer.lock().unwrap().push(prefix + step);
            step += 1;
        }
    });
}
/// Spawns a detached consumer thread that pops values from the shared
/// buffer and prints them, polling every 10 ms while the buffer is empty.
pub fn display(container: &Container) {
    let client = container.packet.clone();
    thread::spawn(move || {
        loop {
            // Fix: check-and-pop under a single lock acquisition. The
            // original locked once for len() and again for pop(), which
            // acquires the mutex twice per item and races with any other
            // consumer (the element observed by len() could be gone by the
            // time pop() runs, making the unwrap on the Option unsound).
            let item = client.lock().unwrap().pop();
            match item {
                Some(value) => println!("item: {}", value),
                None => thread::sleep(Duration::from_millis(10)),
            }
        }
    });
}
| true |
75daa5beb867c6708c0ff62aa09c31f93c80a8ef
|
Rust
|
krixi/remud
|
/remud-lib/src/world/action/system.rs
|
UTF-8
| 2,371 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
use bevy_app::EventReader;
use bevy_ecs::prelude::*;
use itertools::Itertools;
use crate::world::{
action::{get_room_std, into_action, Action},
types::{
player::{Messages, Player},
room::Room,
Configuration, Location, Named,
},
};
/// Event fired when a player enters the game world; `actor` is the
/// logging-in player's entity.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct Login {
    pub actor: Entity,
}

into_action!(Login);
#[tracing::instrument(name = "login system", skip_all)]
// Announces an arriving player: on each Login action, looks up the actor's
// room and name, then queues an "<name> arrives." message for every other
// player currently in that room.
pub fn login_system(
    mut action_reader: EventReader<Action>,
    location_query: Query<(Option<&Location>, Option<&Room>)>,
    player_query: Query<&Named, With<Player>>,
    room_query: Query<&Room>,
    mut messages_query: Query<&mut Messages>,
) {
    for action in action_reader.iter() {
        if let Action::Login(Login { actor }) = action {
            let room = get_room_std(*actor, &location_query);
            // Players are expected to be Named; unwrap flags a broken invariant.
            let name = player_query
                .get(*actor)
                .map(|named| named.as_str())
                .unwrap();
            // Everyone in the room except the arriving actor gets the notice.
            let players = room_query
                .get(room)
                .unwrap()
                .players()
                .iter()
                .filter(|player| **player != *actor)
                .copied()
                .collect_vec();
            let message = format!("{} arrives.", name);
            for player in players {
                if let Ok(mut messages) = messages_query.get_mut(player) {
                    messages.queue(message.clone());
                }
            }
        }
    }
}
/// Event requesting a server restart; `actor` is the entity that issued it.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Restart {
    pub actor: Entity,
}

into_action!(Restart);
#[tracing::instrument(name = "restart system", skip_all)]
/// Sets the restart flag on the shared configuration whenever any
/// `Restart` action is observed this tick. The issuing actor is ignored.
pub fn restart_system(mut action_reader: EventReader<Action>, mut config: ResMut<Configuration>) {
    // Drain every pending action so none are re-delivered next frame.
    for action in action_reader.iter() {
        if matches!(action, Action::Restart(Restart { .. })) {
            config.restart = true;
        }
    }
}
/// Event requesting a server shutdown; `actor` is the entity that issued it.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Shutdown {
    pub actor: Entity,
}

into_action!(Shutdown);
#[tracing::instrument(name = "shutdown system", skip_all)]
/// Sets the shutdown flag on the shared configuration whenever any
/// `Shutdown` action is observed this tick. The issuing actor is ignored.
pub fn shutdown_system(mut action_reader: EventReader<Action>, mut config: ResMut<Configuration>) {
    // Drain every pending action so none are re-delivered next frame.
    for action in action_reader.iter() {
        if matches!(action, Action::Shutdown(Shutdown { .. })) {
            config.shutdown = true;
        }
    }
}
| true |
e43f51462acf292eec48603f01d326db857d30e8
|
Rust
|
magurotuna/atcoder-submissions
|
/dp/src/bin/d.rs
|
UTF-8
| 781 | 2.609375 | 3 |
[] |
no_license
|
use libprocon::*;
/// 0/1 knapsack (AtCoder Educational DP Contest, problem D): choose a
/// subset of N items (weight, value) maximizing total value under weight
/// cap W. Reads input via libprocon's `read!` macro, prints the optimum.
fn main() {
    let (N, W) = read!(usize, usize);
    let mut wv: Vec<(usize, usize)> = Vec::with_capacity(N);
    for _ in 0..N {
        wv.push(read!(usize, usize));
    }
    // dp[i+1][w] := the maximum value achievable choosing from items 0..=i
    // with total weight at most w
    let mut dp = vec![vec![0usize; W + 1]; N + 1];
    for i in 0..N {
        for j in 0..=W {
            // value when item i is NOT taken
            let not_used = dp[i][j];
            // value when item i IS taken (only possible if it fits in j)
            let used = if wv[i].0 <= j {
                dp[i][j - wv[i].0] + wv[i].1
            } else {
                0
            };
            dp[i + 1][j] = std::cmp::max(not_used, used);
        }
    }
    println!("{}", dp[N][W]);
}
| true |
75180588dcc4e8cd2c11904aa5becb41da43c618
|
Rust
|
Aidiakapi/advent_of_code_2020
|
/framework/src/traits.rs
|
UTF-8
| 1,554 | 2.796875 | 3 |
[
"Unlicense"
] |
permissive
|
use crate::error::Error;
use arrayvec::ArrayVec;
/// Normalizes a value into a `Result`: `Result`s pass through unchanged,
/// while any non-`Result` value is wrapped in `Ok`. The split is made
/// total via the `IsNotResult` auto trait below (nightly feature).
pub trait IntoResult {
    type Item;
    type Error;
    fn into_result(self) -> Result<Self::Item, Self::Error>;
}

// Identity: an actual Result is already in the target shape.
impl<I, E> IntoResult for Result<I, E> {
    type Item = I;
    type Error = E;
    fn into_result(self) -> Self {
        self
    }
}

// FIXME: Change this to return the never type when issues with this are fixed
// Blanket impl for every non-Result type (selected via the auto trait),
// wrapping the value in Ok with the crate's Error as the error type.
impl<I: IsNotResult> IntoResult for I {
    type Item = I;
    type Error = crate::error::Error;
    fn into_result(self) -> Result<Self, crate::error::Error> {
        Ok(self)
    }
}
// Auto trait implemented by every type EXCEPT Result (opted out below);
// used to keep the two IntoResult blanket impls from overlapping.
// Requires the nightly `auto_traits` and `negative_impls` features.
pub auto trait IsNotResult {}
impl<T, E> !IsNotResult for Result<T, E> {}
/// Converts an error-like value into the framework's `Error` type.
pub trait IntoError {
    fn into_error(self) -> Error;
}

// The never type has no values, so this impl can never actually run; it
// exists so infallible code satisfies bounds that require IntoError.
impl IntoError for ! {
    fn into_error(self) -> Error {
        unreachable!()
    }
}

// Identity conversion.
impl IntoError for Error {
    fn into_error(self) -> Error {
        self
    }
}
/// One Advent-of-Code-style day: `nr` is the day number and `evaluate`
/// runs up to two labelled parts against the raw puzzle input.
pub trait Day {
    fn nr(&self) -> u32;
    fn evaluate(&self, input: String) -> ArrayVec<[(&'static str, Result<String, Error>); 2]>;
}
/// Unwraps a `Result` whose `Ok` and `Err` payloads share one type,
/// yielding that payload regardless of which variant holds it.
pub trait ResultWhereValueIsErrorExt {
    type Type;
    fn unwrap_either(self) -> Self::Type;
}

impl<T> ResultWhereValueIsErrorExt for std::result::Result<T, T> {
    type Type = T;
    fn unwrap_either(self) -> T {
        // Both variants carry a T, so either arm just forwards the payload.
        self.unwrap_or_else(|value| value)
    }
}

impl<'r, T> ResultWhereValueIsErrorExt for &'r std::result::Result<T, T> {
    type Type = &'r T;
    fn unwrap_either(self) -> Self::Type {
        // Borrow the payload in place: as_ref() yields Result<&T, &T>.
        self.as_ref().unwrap_or_else(|value| value)
    }
}
| true |
1ac0b1f8586afea38dcd1a1978acd107fd94c5da
|
Rust
|
gluxon/wg-web-server
|
/src/controllers/network.rs
|
UTF-8
| 1,957 | 2.828125 | 3 |
[] |
no_license
|
use crate::states::WgState;
use askama::Template;
use failure;
use rocket::get;
use rocket::State;
use wireguard_uapi::get::Device;
/// Askama template backing the network overview page; renders the current
/// WireGuard device state.
#[derive(Template)]
#[template(path = "network/index.html")]
pub struct IndexTemplate {
    device: Device,
}

/// GET / — snapshots the WireGuard device via the shared state and hands
/// it to the template for rendering.
#[get("/")]
pub fn index(wg: State<WgState>) -> Result<IndexTemplate, failure::Error> {
    let device = wg.get_device()?;
    Ok(IndexTemplate { device })
}
/// Askama template filters used by `network/index.html`.
mod filters {
    use askama::Error;
    use base64;
    use humantime;
    use pretty_bytes;
    use std::net::SocketAddr;
    use std::time::{Duration, SystemTime};
    use wireguard_uapi::get::AllowedIp;

    /// Base64-encodes arbitrary bytes (e.g. WireGuard public keys).
    pub fn base64_encode<T: ?Sized + AsRef<[u8]>>(input: &T) -> Result<String, Error> {
        Ok(base64::encode(input))
    }

    /// Formats a peer endpoint for display.
    ///
    /// Fix: delegate to `SocketAddr`'s `Display` impl, which correctly
    /// wraps IPv6 addresses in brackets (`[::1]:51820`); the previous
    /// manual `format!("{}:{}", ip, port)` produced ambiguous output for
    /// IPv6 endpoints.
    pub fn endpoint(endpoint: &SocketAddr) -> Result<String, Error> {
        Ok(endpoint.to_string())
    }

    /// Renders a peer's allowed IPs as a comma-separated CIDR list.
    pub fn allowed_ips(ips: &[AllowedIp]) -> Result<String, Error> {
        Ok(ips
            .iter()
            .map(|allowed_ip| format!("{}/{}", allowed_ip.ipaddr, allowed_ip.cidr_mask))
            .collect::<Vec<String>>()
            .join(", "))
    }

    /// Renders how long ago the last handshake happened, e.g. "2m 3s",
    /// or "Unknown" when the clock math fails (pre-epoch or future value).
    pub fn last_handshake_time(last_handshake_time: &Duration) -> Result<String, Error> {
        let difference = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .ok()
            .and_then(|now| now.checked_sub(*last_handshake_time));
        Ok(difference
            // Filter out unnecessary precision beyond seconds
            .map(|diff| Duration::new(diff.as_secs(), 0))
            .map(|diff| humantime::format_duration(diff).to_string())
            .unwrap_or_else(|| "Unknown".to_string()))
    }

    /// Human-readable byte counts (e.g. "1.2 MB").
    // The &u64 argument should be u64, but askama seems to require filters to pass references.
    #[allow(clippy::trivially_copy_pass_by_ref)]
    pub fn bytes(bytes: &u64) -> Result<String, Error> {
        Ok(pretty_bytes::converter::convert(*bytes as f64))
    }
}
| true |
7628251541c281b7b7fe3784f28c75ae476803b3
|
Rust
|
prestonTao/rust-example
|
/src/thread_safe/s2.rs
|
UTF-8
| 2,807 | 3.25 | 3 |
[] |
no_license
|
/*
在s1基础上添加了多个协程并行执行异步程序,但无法判断所有异步程序执行完成
所以run方法里面加了句等待一秒钟退出的方法,是保证所有异步方法全部执行完成。
*/
// use async_channel::{bounded, Receiver, Sender};
use crossbeam_channel::{Receiver, Sender, bounded, select, unbounded};
use async_io::Timer;
use std::time::Duration;
/// Drives the demo: runs `example()` and then waits one second so the
/// detached worker tasks can finish before the executor is torn down
/// (there is no completion signal for the individual tasks).
pub async fn run(){
    example().await;
    Timer::after(Duration::from_secs(1)).await;
}
// Messages sent from the "main component" to the component running in parallel.
enum WorkMsg{
    Work(u8),
    Exit,
}

// Messages sent back from the parallel component to the "main component".
enum ResultMsg{
    Result(u8),
    Exited,
}
/// Sends five work items plus an Exit to the dispatcher, then counts
/// result messages until the Exited acknowledgement arrives.
async fn example(){
    let (work_sender, work_receiver) = bounded(100);
    let (result_sender, result_receiver) = bounded(100);
    // Spawn a detached task to run the other component in parallel.
    smol::spawn(dispatch(work_receiver, result_sender)).detach();
    println!("111111111");
    // NOTE(review): the send() Results are ignored; on a closed channel
    // these sends would fail silently.
    work_sender.send(WorkMsg::Work(1));
    work_sender.send(WorkMsg::Work(2));
    work_sender.send(WorkMsg::Work(3));
    work_sender.send(WorkMsg::Work(4));
    work_sender.send(WorkMsg::Work(5));
    work_sender.send(WorkMsg::Exit);
    println!("222222222222");
    // Count of completed work results.
    let mut counter = 0;
    // NOTE(review): dispatch sends Exited as soon as it reads Exit, while
    // the spawned tasks may still be running — so this loop can terminate
    // before all five results are counted (hence the 1s grace wait in run()).
    loop{
        match result_receiver.recv() {
            Ok(ResultMsg::Result(num)) => {
                println!("接收到返回消息 start");
                counter += 1;
                println!("接收到返回消息 end");
            },
            Ok(ResultMsg::Exited) => {
                println!("接收到Exit消息");
                break;
            },
            _ => panic!("Error receiving a ResultMsg."),
        }
    }
    println!("{}", counter);
    println!("finish!");
}
/// Worker-side loop: receives and handles messages until an Exit arrives.
/// Each Work item is handed to a freshly spawned, detached task; Exit is
/// acknowledged with ResultMsg::Exited and ends the loop.
async fn dispatch(receiver: Receiver<WorkMsg>, sender: Sender<ResultMsg>){
    loop {
        // Receive and process messages until the exit message arrives.
        match receiver.recv() {
            Ok(WorkMsg::Work(num)) => {
                // Do some work and send a message to the result queue.
                println!("收到任务消息 start");
                smol::spawn(task(sender.clone(), num)).detach();
                println!("收到任务消息 end");
            },
            Ok(WorkMsg::Exit) => {
                // Send the exit acknowledgement.
                println!("收到Exit消息 start");
                sender.send(ResultMsg::Exited);
                println!("收到Exit消息 end");
                break;
            },
            _ => {panic!("Error receiving a WorkMsg.")},
        }
    }
}
/// One unit of "work": logs the parameter and echoes it back on the
/// result channel (send Result is ignored).
async fn task(sender: Sender<ResultMsg>, param: u8){
    println!("开始工作 {}",param);
    sender.send(ResultMsg::Result(param));
}
| true |
592dd422c9251a3142866c1a65879a1e7b3954aa
|
Rust
|
clarfonthey/bitvec
|
/src/mem.rs
|
UTF-8
| 3,879 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
#![doc = include_str!("../doc/mem.md")]
use core::{
cell::Cell,
mem,
};
use funty::Unsigned;
use radium::marker::BitOps;
#[doc = include_str!("../doc/mem/BitRegister.md")]
pub trait BitRegister: Unsigned + BitOps {
    /// The number of bits required to store an index in the range `0 .. BITS`.
    const INDX: u8 = bits_of::<Self>().trailing_zeros() as u8;

    /// A mask over all bits that can be used as an index within the element.
    /// This is the value with the least significant `INDX`-many bits set high.
    const MASK: u8 = bits_of::<Self>() as u8 - 1;

    /// The literal `!0`.
    const ALL: Self;
}
/// Marks certain fundamentals as processor registers.
///
/// Expands to a `BitRegister` impl for each listed integer type, with
/// `ALL` defined as the all-ones pattern `!0`.
macro_rules! register {
    ($($t:ty),+ $(,)?) => { $(
        impl BitRegister for $t {
            const ALL: Self = !0;
        }
    )+ };
}

// u8/u16/u32 fit in a machine word on every supported target.
register!(u8, u16, u32);
/** `u64` can only be used as a register on processors whose word size is at
least 64 bits.

This implementation is not present on targets with 32-bit processor words.
**/
#[cfg(target_pointer_width = "64")]
impl BitRegister for u64 {
    const ALL: Self = !0;
}

// usize always matches the processor word, so it is always a register.
register!(usize);
/// Counts the number of bits in a value of type `T`.
pub const fn bits_of<T>() -> usize {
    let bytes = core::mem::size_of::<T>();
    // saturating_mul keeps the arithmetic total even for absurd sizes.
    bytes.saturating_mul(<u8>::BITS as usize)
}

/// Computes how many `T` elements are needed to store `bits` bits — the
/// ceiling of `bits / bits_of::<T>()` — or `0` when `T` is zero-sized.
pub const fn elts<T>(bits: usize) -> usize {
    let width = bits_of::<T>();
    if width == 0 {
        0
    } else {
        bits / width + (bits % width != 0) as usize
    }
}
/// Tests if a type has alignment equal to its size.
#[doc(hidden)]
#[cfg(not(tarpaulin_include))]
pub const fn aligned_to_size<T>() -> bool {
    // True for the primitive integers; false for most multi-field structs.
    mem::size_of::<T>() == mem::align_of::<T>()
}
/// Tests if two types have identical layouts (size and alignment are equal).
#[doc(hidden)]
#[cfg(not(tarpaulin_include))]
pub const fn layout_eq<T, U>() -> bool {
    mem::size_of::<T>() == mem::size_of::<U>()
        && mem::align_of::<T>() == mem::align_of::<U>()
}
#[doc(hidden)]
#[repr(transparent)]
#[doc = include_str!("../doc/mem/BitElement.md")]
#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct BitElement<T = usize> {
    // repr(transparent): this wrapper has exactly the layout of `elem`.
    pub elem: T,
}
/// Creates a `BitElement` implementation for an integer and its atomic/cell
/// variants.
///
/// For each `size, bare => atom` triple this emits `new` constructors for
/// `BitElement<bare>`, `BitElement<Cell<bare>>`, and — when radium reports
/// the target supports atomics of that width — `BitElement<Atom>`.
macro_rules! element {
    ($($size:tt, $bare:ty => $atom:ident);+ $(;)?) => { $(
        impl BitElement<$bare> {
            /// Creates a new element wrapper from a raw integer.
            pub const fn new(elem: $bare) -> Self {
                Self {
                    elem,
                }
            }
        }
        impl BitElement<Cell<$bare>> {
            /// Creates a new element wrapper from a raw integer.
            pub const fn new(elem: $bare) -> Self {
                Self {
                    elem: Cell::new(elem),
                }
            }
        }
        radium::if_atomic!( if atomic($size) {
            use core::sync::atomic::$atom;
            impl BitElement<$atom> {
                /// Creates a new element wrapper from a raw integer.
                pub const fn new(elem: $bare) -> Self {
                    Self {
                        elem: <$atom>::new(elem),
                    }
                }
            }
        });
    )+ };
}

element! {
    8, u8 => AtomicU8;
    16, u16 => AtomicU16;
    32, u32 => AtomicU32;
}

// 64-bit atomics only exist where the word size allows them.
#[cfg(target_pointer_width = "64")]
element!(64, u64 => AtomicU64);

element!(size, usize => AtomicUsize);
#[cfg(test)]
mod tests {
    use super::*;
    use crate::access::*;

    // Checks that every integer type and its BitSafe wrapper are
    // self-aligned and layout-compatible, so the wrappers can alias the
    // raw integers safely.
    #[test]
    fn integer_properties() {
        assert!(aligned_to_size::<u8>());
        assert!(aligned_to_size::<BitSafeU8>());
        assert!(layout_eq::<u8, BitSafeU8>());

        assert!(aligned_to_size::<u16>());
        assert!(aligned_to_size::<BitSafeU16>());
        assert!(layout_eq::<u16, BitSafeU16>());

        assert!(aligned_to_size::<u32>());
        assert!(aligned_to_size::<BitSafeU32>());
        assert!(layout_eq::<u32, BitSafeU32>());

        assert!(aligned_to_size::<usize>());
        assert!(aligned_to_size::<BitSafeUsize>());
        assert!(layout_eq::<usize, BitSafeUsize>());

        // u64 is only a register type on 64-bit targets.
        #[cfg(target_pointer_width = "64")]
        {
            assert!(aligned_to_size::<u64>());
            assert!(aligned_to_size::<BitSafeU64>());
            assert!(layout_eq::<u64, BitSafeU64>());
        }
    }
}
| true |
af28ffd8b500aa77ab7c6024c8dcf7cf715b2ecf
|
Rust
|
sunkaiiii/study_computer_science
|
/Learn_Rust/the_rust_programming_langauge/chapter15smart_pointer/src/main.rs
|
UTF-8
| 1,789 | 3.578125 | 4 |
[] |
no_license
|
use std::{cell::RefCell, rc::Rc};
// A cons-list whose tail is reference-counted, so several lists can share
// one suffix (used in main() to give `a` two additional owners, b and c).
enum List {
    Cons(i32,Rc<List>),
    Nil,
}

use crate::List::{Cons,Nil};
use crate::List2::{Cons2,Nil2};

// Like `List`, but each element is additionally wrapped in RefCell so the
// shared values can be mutated through any owner.
#[derive(Debug)]
enum List2 {
    Cons2(Rc<RefCell<i32>>,Rc<List2>),
    Nil2,
}
/// Chapter-15 smart-pointer exercises: Rc sharing, Deref on a custom Box,
/// Rc<RefCell<..>> shared mutation, and Drop ordering.
fn main() {
    // let list = Cons(1,Box::new(Cons(2,Box::new(Cons(3,Box::new(Nil))))));
    // Reference counting: build a graph-like structure where two lists
    // share the same tail.
    let a = Rc::new(Cons(5,Rc::new(Cons(10,Rc::new(Nil)))));
    let b = Cons(3,Rc::clone(&a));
    let c = Cons(4, Rc::clone(&a));
    println!("count after creating c={}",Rc::strong_count(&a));
    let x = 5;
    let y = MyBox::new(x);
    assert_eq!(5,x);
    assert_eq!(5,*y); // dereference through MyBox's Deref impl
    // Define a value with multiple owners that can still be mutated.
    let value = Rc::new(RefCell::new(5));
    let a = Rc::new(Cons2(Rc::clone(&value),Rc::new(Nil2)));
    let b = Cons2(Rc::new(RefCell::new(6)),Rc::clone(&a));
    let c = Cons2(Rc::new(RefCell::new(10)),Rc::clone(&a));
    // Mutating through `value` is visible from a, b and c alike.
    *value.borrow_mut() += 10;
    println!("a after = {:?}", a);
    println!("b after = {:?}", b);
    println!("c after = {:?}", c);
    let c = CustomSmartPointer { data: String::from("my stuff") };
    let d = CustomSmartPointer { data: String::from("other stuff") };
    println!("CustomSmartPointers created.");
    // Drop manually; the path could be omitted and `drop` called directly.
    std::mem::drop(c);
    // The Drop impl runs at this point (and again for `d` at scope end).
}
use std::ops::Deref;
/// A minimal Box-like wrapper used to demonstrate the `Deref` trait.
struct MyBox<U>(U);

impl<U> MyBox<U> {
    /// Wraps a value in a `MyBox`.
    fn new(value: U) -> MyBox<U> {
        MyBox(value)
    }
}

impl<U> Deref for MyBox<U> {
    type Target = U;

    /// Dereferences to the wrapped value, enabling `*my_box` and deref
    /// coercion.
    fn deref(&self) -> &U {
        &self.0
    }
}
// Demonstrates Drop: prints a message when the value goes out of scope
// (or is dropped explicitly with std::mem::drop).
struct CustomSmartPointer{
    data:String,
}

impl Drop for CustomSmartPointer {
    fn drop(&mut self) {
        println!("Dropping CustomSmartPointer with data '{}'",self.data);
    }
}
| true |
afbabf78c7d74e0b5c7473127d920a006aaa44e3
|
Rust
|
japarado/zero2prod
|
/tests/api/subscriptions_confirm.rs
|
UTF-8
| 2,072 | 2.625 | 3 |
[
"MIT"
] |
permissive
|
use reqwest::Url;
use wiremock::{Mock, ResponseTemplate};
use wiremock::matchers::{method, path};
use crate::helpers::spawn_app;
// GET /subscriptions/confirm without the required token query parameter
// must be rejected as a bad request.
#[actix_rt::test]
async fn confirmations_without_token_are_rejected_with_a_400() {
    let app = spawn_app().await;
    let response = reqwest::get(&format!("{}/subscriptions/confirm", app.address))
        .await
        .unwrap();
    assert_eq!(response.status().as_u16(), 400);
}
// Subscribing triggers a confirmation email (captured by the mock email
// server); following the HTML link in that email must succeed.
#[actix_rt::test]
pub async fn the_link_returned_by_subscribe_returns_a_200_if_called() {
    let app = spawn_app().await;
    let body = "name=le%20guin&email=ursula_le_guin%40gmail.com";
    // Stub the email provider so the outgoing request is recorded, not sent.
    Mock::given(path("/email"))
        .and(method("POST"))
        .respond_with(ResponseTemplate::new(200))
        .mount(&app.email_server)
        .await;
    app.post_subscriptions(body.into()).await;
    // Extract the confirmation link from the captured email request.
    let email_request = &app.email_server.received_requests().await.unwrap()[0];
    let confirmation_links = app.get_confirmation_links(&email_request);
    let response = reqwest::get(confirmation_links.html).await.unwrap();
    assert_eq!(response.status().as_u16(), 200);
}
// End-to-end: subscribe, follow the emailed confirmation link, then verify
// the subscriber row in the database switched to status "confirmed".
#[actix_rt::test]
async fn clicking_on_the_confirmation_link_confirms_a_subscriber() {
    let app = spawn_app().await;
    let body = "name=le%20guin&email=ursula_le_guin%40gmail.com";
    // Stub the email provider so the outgoing request is recorded, not sent.
    Mock::given(path("/email"))
        .and(method("POST"))
        .respond_with(ResponseTemplate::new(200))
        .mount(&app.email_server)
        .await;
    app.post_subscriptions(body.into()).await;
    let email_request = &app.email_server.received_requests().await.unwrap()[0];
    let confirmation_links = app.get_confirmation_links(&email_request);
    // error_for_status turns any non-2xx into a test failure.
    reqwest::get(confirmation_links.html)
        .await
        .unwrap()
        .error_for_status()
        .unwrap();
    let saved = sqlx::query!("SELECT email, name, status FROM subscriptions")
        .fetch_one(&app.db_pool)
        .await
        .expect("Failed to fetch subscription.");
    assert_eq!(saved.email, "ursula_le_guin@gmail.com");
    assert_eq!(saved.name, "le guin");
    assert_eq!(saved.status, "confirmed");
}
| true |
3d1a65bedd1adeaa9023600db772b2befc52070f
|
Rust
|
XZentus/rust
|
/guess_game/src/main.rs
|
UTF-8
| 948 | 3.125 | 3 |
[] |
no_license
|
extern crate rand;
use std::io::stdin;
use std::io::Write;
use std::io::stdout;
use rand::Rng;
use std::cmp::Ordering;
/// Classic guessing game: picks a secret in [1, 100], then reads guesses
/// from stdin until the user hits it, printing Less/Greater hints.
/// (The secret is printed up front for demonstration purposes.)
fn main() {
    // rand 0.x API: gen_range takes (low, high_exclusive).
    let snum = rand::thread_rng().gen_range(1, 101);
    println!("guess_game>> rnd: {}", snum);
    loop {
        let mut guess = String::new();
        print!("guess_game>> try: ");
        // A failed flush only affects prompt visibility; discard the Result
        // explicitly instead of the verbose empty-arm match used before.
        let _ = stdout().flush();
        // Fix: `.ok().expect(..)` threw away the io::Error before panicking;
        // Result::expect reports the underlying error alongside the message.
        stdin().read_line(&mut guess)
            .expect("Fail read line");
        // Non-numeric input just re-prompts.
        let guess: u32 = match guess.trim().parse() {
            Ok(num) => num,
            Err(_) => continue,
        };
        match guess.cmp(&snum) {
            Ordering::Less => println!("guess_game>> Less"),
            Ordering::Equal => {
                println!("guess_game>> Equal");
                break;
            }
            Ordering::Greater => println!("guess_game>> Greater"),
        }
    }
}
| true |
2d92da0127ea799b2a72844039dbbe37fa147c64
|
Rust
|
wmmxk/OOP_Rust
|
/seq/main.rs
|
UTF-8
| 852 | 3.625 | 4 |
[] |
no_license
|
//source: https://blog.rust-lang.org/2015/04/17/Enums-match-mutation-and-moves.html
/// Formats `x` as an English ordinal: "1st", "22nd", "43rd", "67th", …
///
/// The table keys on `(x % 10, x % 100)` so that 11/12/13 (and 111, 112, …)
/// correctly fall through to "th".
fn num_to_ordinal(x: u32) -> String {
    // `..=` replaces the deprecated `...` range-pattern syntax, and the
    // suffix is produced as a match *expression* instead of a mutated local.
    let suffix = match (x % 10, x % 100) {
        (1, 1) | (1, 21..=91) => "st",
        (2, 2) | (2, 22..=92) => "nd",
        (3, 3) | (3, 23..=93) => "rd",
        _ => "th",
    };
    format!("{}{}", x, suffix)
}
fn main() {
    assert_eq!(num_to_ordinal(0), "0th");
    println!(" res: {}", num_to_ordinal(1));
    // Fixed: 12 formats as "12th", not "1th" — the previous assertion
    // aborted the program at runtime.
    assert_eq!(num_to_ordinal(12), "12th");
    assert_eq!(num_to_ordinal(22), "22nd");
    assert_eq!(num_to_ordinal(43), "43rd");
    assert_eq!(num_to_ordinal(67), "67th");
    assert_eq!(num_to_ordinal(1901), "1901st");
}
| true |
cd5f80b47e9a34dba44f224df7b94235b5ab293a
|
Rust
|
merodeadorNocturno/rust_raytracer
|
/src/classes/plane.rs
|
UTF-8
| 754 | 3.21875 | 3 |
[] |
no_license
|
use crate::classes::vector::Vector;
use crate::interfaces::{ Ray, Surface, Intersection, Intersect };
pub struct Plane {
    // Plane normal; not necessarily unit length — TODO(review): confirm
    // whether callers are expected to normalize before construction.
    norm: Vector,
    // Scalar offset used in the intersection equation below; presumably the
    // `d` in `dot(norm, p) + d = 0` — verify against the scene format.
    offset: f64,
}
impl Plane {
    // NOTE(review): this "constructor" is broken — it takes `self` by value
    // and returns `()`, so the assignments below are lost when `self` is
    // dropped, and the `surface` argument is never used. It most likely
    // should be `fn new(norm: &Vector, offset: f64, surface: Surface) -> Plane`.
    pub fn new(mut self, norm: &Vector, offset: f64, surface: Surface) {
        self.norm = Vector::new(norm.x, norm.y, norm.z);
        self.offset = offset;
    }
    // Returns the stored normal; `pos` is accepted but unused because a
    // plane's normal is the same everywhere on its surface.
    fn normalize(self, pos: Vector) -> Vector {
        self.norm
    }
}
impl Intersect<Plane> for Plane {
    // Ray/plane intersection: solves `dot(n, start + t*dir) + offset = 0`.
    fn intersect(self, ray: Ray) -> Intersection<Self> {
        // Rate of approach of the ray along the normal.
        let denom = Vector::dot(&self.norm, &ray.dir);
        // Parametric distance along the ray to the plane.
        let dist = (Vector::dot(&self.norm, &ray.start) + self.offset) / (-denom);
        let mut is_null: bool = false;
        // `is_null` flags denom > 0 — presumably hits from the back side are
        // discarded; confirm against the `Intersection` consumers.
        if denom > 0.0 {
            is_null = true;
        }
        Intersection { thing: self, ray, dist, is_null }
    }
}
| true |
09fc70a351ccbc08a7ae85855832bfe6f171d37e
|
Rust
|
neelayjunnarkar/Proj_Euler
|
/src/prob5.rs
|
UTF-8
| 781 | 3.25 | 3 |
[] |
no_license
|
// Inclusive stepped range iterator; `step` may be negative for descending
// iteration. Note: the first value yielded by `next()` is `curr + step`,
// not `curr` (see `Iterator::next` below).
struct Range {
    // Last value yielded.
    curr: i32,
    // Value the next `next()` call will yield.
    next: i32,
    step: i32,
    // Inclusive bound; iteration stops once `curr` passes it in `step`'s direction.
    end: i32,
}
impl Range {
    /// Builds a stepped range. Note that, because `next()` advances before
    /// yielding, the first produced value is `start + step`, not `start`.
    fn new(start: i32, end: i32, step: i32) -> Range {
        Range {
            curr: start,
            next: start + step,
            step,
            end,
        }
    }
}
impl Iterator for Range {
    type Item = i32;

    /// Advances by `step` and yields the new value, or `None` once the
    /// inclusive `end` bound has been passed in the direction of travel.
    fn next(&mut self) -> Option<i32> {
        self.curr = self.next;
        self.next += self.step;
        let finished = (self.step > 0 && self.curr > self.end)
            || (self.step < 0 && self.curr < self.end);
        if finished {
            None
        } else {
            Some(self.curr)
        }
    }
}
/// Project Euler 5: smallest positive number evenly divisible by all of 1..=20.
///
/// Computed directly as lcm(1..=20) with Euclid's gcd instead of the previous
/// brute-force scan over multiples of 20; the returned value is identical
/// ("232792560") but found in 20 steps rather than millions of iterations.
pub fn run() -> String {
    // Euclid's algorithm.
    fn gcd(a: u64, b: u64) -> u64 {
        if b == 0 {
            a
        } else {
            gcd(b, a % b)
        }
    }

    // lcm(a, b) = a / gcd(a, b) * b; dividing first avoids overflow.
    let lcm = (1..=20u64).fold(1, |acc, n| acc / gcd(acc, n) * n);
    lcm.to_string()
}
| true |
3635e46127c5a4ec712cad334c5fd62097fdfad3
|
Rust
|
andreasatle/Exercism
|
/rust/luhn/src/lib.rs
|
UTF-8
| 2,116 | 4.34375 | 4 |
[] |
no_license
|
//! Given a number determine whether or not it is valid per the Luhn formula.
//!
//! The Luhn algorithm is a simple checksum formula used to validate a variety of identification numbers, such as credit card numbers and Canadian Social Insurance Numbers.
//!
//! The task is to check if a given string is valid.
//!
//! Validating a Number
//! Strings of length 1 or less are not valid. Spaces are allowed in the input, but they should be stripped before checking. All other non-digit characters are disallowed.
//!
//! # Example 1: valid credit card number
//! 4539 3195 0343 6467
//! The first step of the Luhn algorithm is to double every second digit, starting from the right. We will be doubling
//!
//! ```comment
//! 4_3_ 3_9_ 0_4_ 6_6_
//! ```
//! If doubling the number results in a number greater than 9 then subtract 9 from the product. The results of our doubling:
//!
//! ```comment
//! 8569 6195 0383 3437
//! ```
//! Then sum all of the digits:
//!
//! ```comment
//! 8+5+6+9+6+1+9+5+0+3+8+3+3+4+3+7 = 80
//! ```
//! If the sum is evenly divisible by 10, then the number is valid. This number is valid!
//!
//! # Example 2: invalid credit card number
//! ```comment
//! 8273 1232 7352 0569
//! ```
//! Double the second digits, starting from the right
//!
//! ```comment
//! 7253 2262 5312 0539
//! ```
//! Sum the digits
//!
//! ```comment
//! 7+2+5+3+2+2+6+2+5+3+1+2+0+5+3+9 = 57
//! ```
//! 57 is not evenly divisible by 10, so this number is not valid.
/// Check whether a code has a valid luhn checksum.
///
/// Spaces are ignored; any other non-digit character makes the code invalid,
/// as does a code with fewer than two digits.
pub fn is_valid(code: &str) -> bool {
    // Strip spaces and convert each remaining character to a digit; a single
    // non-digit collapses the whole collection to `None`.
    let digits: Option<Vec<u32>> = code
        .chars()
        .filter(|&c| c != ' ')
        .map(|c| c.to_digit(10))
        .collect();

    match digits {
        Some(ds) if ds.len() > 1 => {
            // Double every second digit counted from the right, subtracting 9
            // when the doubled value exceeds 9, then sum all digits.
            let checksum: u32 = ds
                .iter()
                .rev()
                .enumerate()
                .map(|(i, &d)| {
                    if i % 2 == 1 {
                        let doubled = d * 2;
                        if doubled > 9 {
                            doubled - 9
                        } else {
                            doubled
                        }
                    } else {
                        d
                    }
                })
                .sum();
            checksum % 10 == 0
        }
        // Too short, or contained an invalid character.
        _ => false,
    }
}
| true |
dd19e293bf7c1a7e5821e13fbb3df81e00fdc80e
|
Rust
|
aylei/leetcode-rust
|
/src/solution/s0042_trapping_rain_water.rs
|
UTF-8
| 1,020 | 3.203125 | 3 |
[
"Apache-2.0"
] |
permissive
|
/**
* [42] Trapping Rain Water
*
* Given n non-negative integers representing an elevation map where the width of each bar is 1, compute how much water it is able to trap after raining.
*
* <img src="https://assets.leetcode.com/uploads/2018/10/22/rainwatertrap.png" style="width: 412px; height: 161px;" /><br />
* <small>The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1]. In this case, 6 units of rain water (blue section) are being trapped. Thanks Marcos for contributing this image!</small>
*
* Example:
*
*
* Input: [0,1,0,2,1,0,1,3,2,1,2,1]
* Output: 6
*
*/
pub struct Solution {}

// problem: https://leetcode.com/problems/trapping-rain-water/
// discuss: https://leetcode.com/problems/trapping-rain-water/discuss/?currentPage=1&orderBy=most_votes&query=

// submission codes start here

impl Solution {
    /// Two-pointer sweep: always advance the side with the lower wall. The
    /// water above that cell is bounded by the running maximum on its own
    /// side, because the opposite side is known to hold at least as tall a
    /// wall. O(n) time, O(1) extra space.
    pub fn trap(height: Vec<i32>) -> i32 {
        if height.is_empty() {
            return 0;
        }
        let (mut left, mut right) = (0usize, height.len() - 1);
        let (mut left_max, mut right_max) = (0i32, 0i32);
        let mut water = 0;
        while left < right {
            if height[left] < height[right] {
                left_max = left_max.max(height[left]);
                water += left_max - height[left];
                left += 1;
            } else {
                right_max = right_max.max(height[right]);
                water += right_max - height[right];
                right -= 1;
            }
        }
        water
    }
}
// submission codes end
#[cfg(test)]
mod tests {
    use super::*;
    // TODO: add assertions covering `Solution::trap` (e.g. the example input
    // [0,1,0,2,1,0,1,3,2,1,2,1] should trap 6 units).
    #[test]
    fn test_42() {}
}
| true |
132309b8476bbc31a2f97e6a316e3bc2daa1b482
|
Rust
|
boa-dev/boa
|
/boa_engine/src/object/builtins/jsarraybuffer.rs
|
UTF-8
| 7,468 | 3.296875 | 3 |
[
"MIT",
"Unlicense"
] |
permissive
|
//! A Rust API wrapper for Boa's `ArrayBuffer` Builtin ECMAScript Object
use crate::{
builtins::array_buffer::ArrayBuffer,
context::intrinsics::StandardConstructors,
error::JsNativeError,
object::{
internal_methods::get_prototype_from_constructor, JsObject, JsObjectType, ObjectData,
},
value::TryFromJs,
Context, JsResult, JsValue,
};
use boa_gc::{Finalize, Trace};
use std::ops::Deref;
/// `JsArrayBuffer` provides a wrapper for Boa's implementation of the ECMAScript `ArrayBuffer` object
#[derive(Debug, Clone, Trace, Finalize)]
pub struct JsArrayBuffer {
    // The wrapped JS object; every constructor in this module guarantees it
    // carries `ArrayBuffer` internal data (see `from_object`'s check).
    inner: JsObject,
}
impl JsArrayBuffer {
/// Create a new array buffer with byte length.
///
/// ```
/// # use boa_engine::{
/// # object::builtins::JsArrayBuffer,
/// # Context, JsResult
/// # };
/// # fn main() -> JsResult<()> {
/// # // Initialize context
/// # let context = &mut Context::default();
/// // Creates a blank array buffer of n bytes
/// let array_buffer = JsArrayBuffer::new(4, context)?;
///
/// assert_eq!(array_buffer.take()?, vec![0_u8; 4]);
///
/// # Ok(())
/// # }
/// ```
#[inline]
pub fn new(byte_length: usize, context: &mut Context<'_>) -> JsResult<Self> {
let inner = ArrayBuffer::allocate(
&context
.intrinsics()
.constructors()
.array_buffer()
.constructor()
.into(),
byte_length as u64,
context,
)?;
Ok(Self { inner })
}
/// Create a new array buffer from byte block.
///
/// This uses the passed byte block as the internal storage, it does not clone it!
///
/// The `byte_length` will be set to `byte_block.len()`.
///
/// ```
/// # use boa_engine::{
/// # object::builtins::JsArrayBuffer,
/// # Context, JsResult,
/// # };
/// # fn main() -> JsResult<()> {
/// # // Initialize context
/// # let context = &mut Context::default();
///
/// // Create a buffer from a chunk of data
/// let data_block: Vec<u8> = (0..5).collect();
/// let array_buffer = JsArrayBuffer::from_byte_block(data_block, context)?;
///
/// assert_eq!(array_buffer.take()?, (0..5).collect::<Vec<u8>>());
/// # Ok(())
/// # }
/// ```
pub fn from_byte_block(byte_block: Vec<u8>, context: &mut Context<'_>) -> JsResult<Self> {
let byte_length = byte_block.len();
let constructor = context
.intrinsics()
.constructors()
.array_buffer()
.constructor()
.into();
// 1. Let obj be ? OrdinaryCreateFromConstructor(constructor, "%ArrayBuffer.prototype%", « [[ArrayBufferData]], [[ArrayBufferByteLength]], [[ArrayBufferDetachKey]] »).
let prototype = get_prototype_from_constructor(
&constructor,
StandardConstructors::array_buffer,
context,
)?;
// 2. Let block be ? CreateByteDataBlock(byteLength).
//
// NOTE: We skip step 2. because we already have the block
// that is passed to us as a function argument.
let block = byte_block;
// 3. Set obj.[[ArrayBufferData]] to block.
// 4. Set obj.[[ArrayBufferByteLength]] to byteLength.
let obj = JsObject::from_proto_and_data_with_shared_shape(
context.root_shape(),
prototype,
ObjectData::array_buffer(ArrayBuffer {
array_buffer_data: Some(block),
array_buffer_byte_length: byte_length as u64,
array_buffer_detach_key: JsValue::Undefined,
}),
);
Ok(Self { inner: obj })
}
/// Create a [`JsArrayBuffer`] from a [`JsObject`], if the object is not an array buffer throw a `TypeError`.
///
/// This does not clone the fields of the array buffer, it only does a shallow clone of the object.
#[inline]
pub fn from_object(object: JsObject) -> JsResult<Self> {
if object.is_array_buffer() {
Ok(Self { inner: object })
} else {
Err(JsNativeError::typ()
.with_message("object is not an ArrayBuffer")
.into())
}
}
/// Returns the byte length of the array buffer.
///
/// ```
/// # use boa_engine::{
/// # object::builtins::JsArrayBuffer,
/// # Context, JsResult,
/// # };
/// # fn main() -> JsResult<()> {
/// # // Initialize context
/// # let context = &mut Context::default();
/// // Create a buffer from a chunk of data
/// let data_block: Vec<u8> = (0..5).collect();
/// let array_buffer = JsArrayBuffer::from_byte_block(data_block, context)?;
///
/// // Take the inner buffer
/// let buffer_length = array_buffer.byte_length(context);
///
/// assert_eq!(buffer_length, 5);
/// # Ok(())
/// # }
/// ```
#[inline]
pub fn byte_length(&self, context: &mut Context<'_>) -> usize {
ArrayBuffer::get_byte_length(&self.inner.clone().into(), &[], context)
.expect("it should not throw")
.as_number()
.expect("expected a number") as usize
}
/// Take the inner `ArrayBuffer`'s `array_buffer_data` field and replace it with `None`
///
/// Note: This causes the pre-existing `JsArrayBuffer` to become detached.
///
/// ```
/// # use boa_engine::{
/// # object::builtins::JsArrayBuffer,
/// # Context, JsResult,
/// # };
/// # fn main() -> JsResult<()> {
/// # // Initialize context
/// # let context = &mut Context::default();
/// // Create a buffer from a chunk of data
/// let data_block: Vec<u8> = (0..5).collect();
/// let array_buffer = JsArrayBuffer::from_byte_block(data_block, context)?;
///
/// // Take the inner buffer
/// let internal_buffer = array_buffer.take()?;
///
/// assert_eq!(internal_buffer, (0..5).collect::<Vec<u8>>());
///
/// // Anymore interaction with the buffer will return an error
/// let detached_err = array_buffer.take();
/// assert!(detached_err.is_err());
/// # Ok(())
/// # }
/// ```
#[inline]
pub fn take(&self) -> JsResult<Vec<u8>> {
self.inner
.borrow_mut()
.as_array_buffer_mut()
.expect("inner must be an ArrayBuffer")
.array_buffer_data
.take()
.ok_or_else(|| {
JsNativeError::typ()
.with_message("ArrayBuffer is detached")
.into()
})
}
}
impl From<JsArrayBuffer> for JsObject {
    /// Consuming conversion into the underlying object handle; since `o` is
    /// owned, no reference-count clone is needed.
    #[inline]
    fn from(o: JsArrayBuffer) -> Self {
        o.inner
    }
}
impl From<JsArrayBuffer> for JsValue {
    /// Consuming conversion into a `JsValue`; avoids cloning the owned handle.
    #[inline]
    fn from(o: JsArrayBuffer) -> Self {
        o.inner.into()
    }
}
impl Deref for JsArrayBuffer {
    type Target = JsObject;

    /// Gives read-only access to the wrapped [`JsObject`].
    #[inline]
    fn deref(&self) -> &JsObject {
        &self.inner
    }
}
// Marker impl: lets `JsArrayBuffer` participate in the shared object-wrapper API.
impl JsObjectType for JsArrayBuffer {}
impl TryFromJs for JsArrayBuffer {
    /// Converts a generic [`JsValue`] into a `JsArrayBuffer`, failing with a
    /// `TypeError` for non-object values (object values are further checked
    /// by [`JsArrayBuffer::from_object`]).
    fn try_from_js(value: &JsValue, _context: &mut Context<'_>) -> JsResult<Self> {
        if let JsValue::Object(object) = value {
            Self::from_object(object.clone())
        } else {
            Err(JsNativeError::typ()
                .with_message("value is not an ArrayBuffer object")
                .into())
        }
    }
}
| true |
861248d5b2f48c60b25bc38a5034a599884d0193
|
Rust
|
romdotdog/eretria
|
/tests/literals.rs
|
UTF-8
| 854 | 3.28125 | 3 |
[] |
no_license
|
use eretria::parse_string;
#[test]
fn ident() {
    // Dropped the needless `&`: a string literal is already `&str`, matching
    // the style of every other test in this file.
    assert!(parse_string("fn main() hi").is_ok());
}
#[test]
fn number() {
    assert!(parse_string("fn main() 23928392").is_ok());
}
#[test]
fn float() {
    assert!(parse_string("fn main() 100.2").is_ok());
}
#[test]
fn negative_number() {
    assert!(parse_string("fn main() -221111").is_ok());
}
#[test]
fn negative_float() {
    assert!(parse_string("fn main() -202.5").is_ok());
}
#[test]
fn obscure_decimal_number() {
    assert!(parse_string("fn main() 0d1234567890").is_ok());
}
#[test]
fn binary_number() {
    assert!(parse_string("fn main() 0b10101001").is_ok());
}
#[test]
fn invalid_binary() {
    // Binary literals may only contain 0 and 1, so this must be rejected.
    assert!(parse_string("fn main() 0b1221").is_err());
}
#[test]
fn parentheses() {
    assert!(parse_string("fn main() (-100)").is_ok());
}
| true |
a6edd197dc4bcfa50a2e44cfce25523c58565e0f
|
Rust
|
beevans/integrated-manager-for-lustre
|
/iml-agent-comms/src/session.rs
|
UTF-8
| 1,971 | 2.625 | 3 |
[
"GPL-1.0-or-later",
"MIT",
"BSD-3-Clause"
] |
permissive
|
// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use futures::lock::Mutex;
use iml_wire_types::{Fqdn, Id, ManagerMessage, PluginName};
use std::{collections::HashMap, sync::Arc};
use uuid::Uuid;
// Shared mutable state guarded by an async-aware mutex.
pub type Shared<T> = Arc<Mutex<T>>;
// Active sessions keyed by plugin name.
pub type Sessions = HashMap<PluginName, Session>;
// `Sessions` wrapped for sharing across tasks.
pub type SharedSessions = Shared<Sessions>;
/// A bidirectional virtual channel between the manager and a remote agent plugin.
/// There may be many of these per remote host, and they are transient.
#[derive(Clone, Debug)]
pub struct Session {
    // Host the remote agent runs on.
    pub fqdn: Fqdn,
    // Per-session identifier; a fresh UUID v4 (see `Session::new`).
    pub id: Id,
    pub plugin: PluginName,
}
impl std::fmt::Display for Session {
    /// Renders as `fqdn/plugin/id` (debug-formatted components).
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{:?}/{:?}/{:?}", self.fqdn, self.plugin, self.id)
    }
}
impl Session {
    /// Creates a session for `plugin` on `fqdn` with a freshly generated
    /// hyphenated UUID v4 id.
    pub fn new(plugin: PluginName, fqdn: Fqdn) -> Self {
        let id = Id(Uuid::new_v4().to_hyphenated().to_string());
        Self { fqdn, id, plugin }
    }
}
/// Looks up the session for `plugin`, returning it only when its id also
/// matches `id`.
pub fn get_by_session_id<'a>(
    plugin: &PluginName,
    id: &Id,
    sessions: &'a Sessions,
) -> Option<&'a Session> {
    match sessions.get(plugin) {
        Some(s) if &s.id == id => Some(s),
        _ => None,
    }
}
/// Returns `true` when `msg` refers to a currently held session (terminate-all
/// messages are always valid); otherwise logs and signals that the message
/// should be dropped.
pub fn is_session_valid(msg: &ManagerMessage, sessions: &Sessions) -> bool {
    let retain = match msg {
        ManagerMessage::SessionTerminateAll { .. } => true,
        ManagerMessage::SessionTerminate {
            session_id, plugin, ..
        }
        | ManagerMessage::SessionCreateResponse {
            session_id, plugin, ..
        }
        | ManagerMessage::Data {
            session_id, plugin, ..
        // The pattern bindings are already references (we match on
        // `&ManagerMessage`), so the previous `&plugin` / `&session_id`
        // double-borrows are dropped.
        } => get_by_session_id(plugin, session_id, sessions).is_some(),
    };
    if !retain {
        tracing::info!(
            "Dropping message {:?} because it does not match any held session. Sessions: {:?}",
            msg,
            sessions
        );
    }
    retain
}
| true |
86b41a424317a530bb21d644cf3f9ef7d12d4fe3
|
Rust
|
jdonszelmann/aoc2019
|
/src/day10/challenge1.rs
|
UTF-8
| 2,604 | 3.265625 | 3 |
[] |
no_license
|
use std::collections::HashMap;
/// AoC 2019 day 10 part 1: returns the highest number of asteroids visible
/// from any single asteroid on the map (`#` marks an asteroid).
///
/// Two asteroids block each other exactly when they lie at the same angle
/// from the observer, so visibility is the number of *distinct* angles.
fn main_func(input: &str) -> u64 {
    // Collect asteroid centre coordinates (offset by 0.5 to cell centres).
    let mut asteroids = vec![];
    for (y, line) in input.lines().enumerate() {
        for (x, value) in line.chars().enumerate() {
            if value == '#' {
                asteroids.push((x as f64 + 0.5, y as f64 + 0.5));
            }
        }
    }

    let mut best = 0;
    for a in &asteroids {
        // A HashSet of quantized angles replaces the old HashMap-used-as-a-set;
        // the angle to itself (atan2(0, 0) == 0) is included, exactly as before.
        let distinct: std::collections::HashSet<i64> = asteroids
            .iter()
            .map(|b| ((a.0 - b.0).atan2(a.1 - b.1) * 1_000_000f64) as i64)
            .collect();
        best = best.max(distinct.len());
    }
    best as u64
}
#[cfg(test)]
mod test {
    use crate::day10::challenge1::main_func;
    // Regression test against the real puzzle input; requires the `input`
    // file to sit next to this module at compile time.
    #[test]
    fn test_main_real() {
        let input = include_str!("input");
        let result = main_func(input);
        assert_eq!(result, 284);
        println!("challenge 10.1: {}", result);
    }
    #[test]
    fn test_main_1() {
        assert_eq!(main_func(".#..#\n.....\n#####\n....#\n...##"), 8);
    }
    // The remaining cases are the worked examples from the day 10 puzzle text.
    #[test]
    fn test_main_2() {
        assert_eq!(
            main_func(
                "......#.#.
#..#.#....
..#######.
.#.#.###..
.#..#.....
..#....#.#
#..#....#.
.##.#..###
##...#..#.
.#....####"
            ),
            33
        );
    }
    #[test]
    fn test_main_3() {
        assert_eq!(
            main_func(
                ".#..#..###
####.###.#
....###.#.
..###.##.#
##.##.#.#.
....###..#
..#.#..#.#
#..#.#.###
.##...##.#
.....#.#.."
            ),
            41
        );
    }
    #[test]
    fn test_main_4() {
        assert_eq!(
            main_func(
                ".#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##"
            ),
            210
        );
    }
    #[test]
    fn test_main_5() {
        assert_eq!(
            main_func(
                ".#....#####...#..
##...##.#####..##
##...#...#.#####.
..#.....#...###..
..#.#.....#....##"
            ),
            30
        );
    }
}
| true |
7f11949474872a2b3e922feb89f69f3c9bffee2e
|
Rust
|
AridTag/d2rust
|
/src/d2assetsource.rs
|
UTF-8
| 2,960 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
use amethyst::assets::Source;
use amethyst::utils::application_root_dir;
use amethyst::{Error, Result};
use mpq::Archive;
use std::path::{Path, PathBuf};
// Name of this asset source — presumably the key used when registering it
// with the asset loader; confirm at the registration call site.
pub const SOURCE_NAME: &str = "D2AssetSource";
pub struct D2AssetSource {
    // Absolute root directory that relative asset paths resolve against.
    pub data_base_path: PathBuf,
    // Registered MPQ archive paths; later entries win on lookup (see `load`).
    mpq_sources: Vec<PathBuf>,
}
impl D2AssetSource {
    /// Creates a source rooted at `data_base_path`; a relative path is made
    /// absolute against the application root.
    pub fn new(data_base_path: &str) -> D2AssetSource {
        let mut path = Path::new(data_base_path).to_path_buf();
        if path.is_relative() {
            path = application_root_dir().unwrap().join(path);
        }

        // TODO: Should return an error if the path doesn't exist
        D2AssetSource {
            data_base_path: path,
            mpq_sources: vec![],
        }
    }

    /// Registers an MPQ archive (absolute path, or relative to the data dir).
    ///
    /// # Errors
    ///
    /// * `InvalidInput` if the resolved path is not valid UTF-8
    ///   (previously this `unwrap`ped and panicked).
    /// * `NotFound` if the file cannot be opened as an MPQ archive.
    pub fn add_mpq(&mut self, mpq_path: &str) -> std::io::Result<()> {
        let mut path = PathBuf::from(mpq_path);
        if path.is_relative() {
            path = self.data_base_path.join(path);
        }

        let path_str = path
            .into_os_string()
            .into_string()
            .map_err(|_| std::io::Error::from(std::io::ErrorKind::InvalidInput))?;

        // Probe the archive once up front so bad files are rejected early.
        if Archive::open(path_str.clone()).is_ok() {
            self.mpq_sources.push(PathBuf::from(path_str));
            return Ok(());
        }

        Err(std::io::Error::from(std::io::ErrorKind::NotFound))
    }
}
impl Source for D2AssetSource {
fn modified(&self, _path: &str) -> Result<u64> {
Ok(0)
}
fn load(&self, path_: &str) -> Result<Vec<u8>> {
let path = Path::new(path_);
if path.is_absolute() {
if path.exists() {
if let Ok(bytes) = std::fs::read(path.clone()) {
return Ok(bytes);
}
}
return Err(Error::from_string("Absolute path not found"));
}
if path.is_relative() {
// "loose" files will take priority over archives
let data_path = self.data_base_path.join(path);
if data_path.exists() {
if let Ok(bytes) = std::fs::read(data_path.clone()) {
return Ok(bytes);
} else {
return Err(Error::from_string("Relative path not found"));
}
}
}
let filename = path.to_str().unwrap();
// mpqs loaded later take priority over earlier loaded ones
for mpq_path in self.mpq_sources.iter().rev() {
if let Ok(mut archive) = Archive::open(mpq_path) {
if let Ok(file) = archive.open_file(filename) {
// Found it
let mut buf = vec![0u8; file.size() as usize];
if let Err(_) = file.read(&mut archive, &mut buf) {
return Err(Error::from_string("Failed to read file from mpq"));
}
return Ok(buf);
}
}
}
Err(Error::from_string("File not found"))
}
}
| true |
e94d009eaa93d93e46715c63215b0b762b97196b
|
Rust
|
knurling-rs/vs-code-gdb-defmt-example
|
/xtask/src/main.rs
|
UTF-8
| 3,364 | 2.5625 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::{
env,
io::{BufRead, BufReader},
path::{Path, PathBuf},
};
use anyhow::anyhow;
use duct::cmd;
// Cross-compilation target triple for the firmware build.
const CARGO_TARGET: &str = "thumbv7em-none-eabihf";
// Name of the crate whose ELF gets flashed (used to locate the binary).
const CRATE_NAME: &str = "app";
// Debug-probe interface and MCU target config names passed to OpenOCD.
const OPENOCD_INTERFACE: &str = "jlink";
const OPENOCD_TARGET: &str = "nrf52";
// TCP port OpenOCD's RTT server listens on (read back with `nc` in `gdb`).
const RTT_TCP_PORT: u16 = 8765;
fn main() -> Result<(), anyhow::Error> {
    // Collect CLI args minus the binary name, re-borrowed as &str for matching.
    let args = env::args().skip(1).collect::<Vec<_>>();
    let args = args.iter().map(|s| &**s).collect::<Vec<_>>();
    // Every workflow below assumes the repository root as working directory.
    env::set_current_dir(repo_root()?)?;
    match &args[..] {
        ["gdb"] => gdb()?,
        _ => println!("Cargo workflows
USAGE:
    cargo xtask [COMMAND]
COMMANDS:
    gdb spawns a GDB server; flashes and runs firmware; prints logs
"),
    }
    Ok(())
}
/// Returns the repository root: one directory above this crate's manifest
/// directory (`CARGO_MANIFEST_DIR`).
fn repo_root() -> Result<PathBuf, anyhow::Error> {
    let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
    let root = manifest_dir.parent().unwrap().to_owned();
    Ok(root)
}
/// Builds the firmware, then drives OpenOCD to flash it, set a hardware
/// breakpoint at `main`, configure RTT, and finally streams the RTT logs
/// through `defmt-print`.
fn gdb() -> Result<(), anyhow::Error> {
    const BP_LENGTH: u8 = 2; // breakpoint length
    const RTT_BLOCK_IF_FULL: u32 = 2; // bit in `flags` field
    const RTT_FLAGS: u32 = 44; // offset of `flags` field in control block
    const RTT_ID: &str = "SEGGER RTT"; // control block ID
    const RTT_SIZE: u8 = 48; // control block size
    const THUMB_BIT: u32 = 1;

    cmd!("cargo", "build", "--target", CARGO_TARGET).run()?;

    let elf = Path::new("target")
        .join(CARGO_TARGET)
        .join("debug")
        .join(CRATE_NAME);

    // get symbol addresses from ELF
    let nm = cmd!("nm", "-C", &elf).read()?;
    let mut rtt = None;
    let mut main = None;
    // `nm` lines look like "<hex-address> <kind> <symbol>"; keep the address.
    for line in nm.lines() {
        if line.ends_with("_SEGGER_RTT") {
            rtt = line.splitn(2, ' ').next();
        } else if line.ends_with("main") {
            main = line.splitn(2, ' ').next();
        }
    }
    let rtt = u32::from_str_radix(
        rtt.ok_or_else(|| anyhow!("RTT control block not found"))?,
        16,
    )?;
    // Mask off bit 0 (`THUMB_BIT`) to get the raw breakpoint address.
    let main = u32::from_str_radix(
        main.ok_or_else(|| anyhow!("`main` function not found"))?,
        16,
    )? & !THUMB_BIT;

    #[rustfmt::skip]
    let openocd = cmd!(
        "openocd",
        "-d0",
        "-c", format!("source [find interface/{}.cfg]", OPENOCD_INTERFACE),
        "-c", "transport select swd",
        "-c", format!("source [find target/{}.cfg]", OPENOCD_TARGET),
        "-c", "init",
        "-c", format!("rtt server start {} 0", RTT_TCP_PORT),
        "-c", "reset init",
        "-c", format!("flash write_image erase {}", elf.display()),
        "-c", "reset halt",
        "-c", format!("rtt setup {} {} {:?}", rtt, RTT_SIZE, RTT_ID),
        "-c", format!("bp {} {} hw", main, BP_LENGTH),
        "-c", "resume",
        "-c", format!("mww {} {}", rtt + RTT_FLAGS, RTT_BLOCK_IF_FULL),
        "-c", "rtt start",
    )
    .stderr_to_stdout()
    .reader()?;

    // `while let` instead of `for` so `lines` stays usable for the explicit
    // `drop` at the end (a `for` loop would move the iterator).
    let mut lines = BufReader::new(openocd).lines();
    while let Some(line) = lines.next() {
        let line = line?;
        println!("{}", line);
        // "wrote" marks the end of the flashing output; stop echoing there.
        if line.contains("wrote") {
            break;
        }
    }

    cmd!("nc", "localhost", RTT_TCP_PORT.to_string())
        .pipe(cmd!("defmt-print", "-e", &elf))
        .run()?;

    // close `openocd` *after* `nc`
    drop(lines);

    Ok(())
}
| true |
793871b96f59921051a4cac7c8d8e73ec87ccc65
|
Rust
|
liuchengxu/vim-clap
|
/crates/types/src/query.rs
|
UTF-8
| 1,415 | 3.078125 | 3 |
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
use crate::search_term::{ExactTerm, FuzzyTerm, InverseTerm, SearchTerm, TermType, WordTerm};
/// [`Query`] represents the structural search info parsed from the initial user input.
#[derive(Debug, Clone)]
pub struct Query {
    // Plain word tokens.
    pub word_terms: Vec<WordTerm>,
    // Tokens that must match exactly.
    pub exact_terms: Vec<ExactTerm>,
    // Tokens matched fuzzily.
    pub fuzzy_terms: Vec<FuzzyTerm>,
    // Tokens whose matches should be excluded.
    pub inverse_terms: Vec<InverseTerm>,
}
impl<T: AsRef<str>> From<T> for Query {
fn from(query: T) -> Self {
let query = query.as_ref();
let mut word_terms = Vec::new();
let mut exact_terms = Vec::new();
let mut fuzzy_terms = Vec::new();
let mut inverse_terms = Vec::new();
for token in query.split_whitespace() {
let SearchTerm { ty, text } = token.into();
match ty {
TermType::Word => word_terms.push(WordTerm { text }),
TermType::Exact(term_ty) => exact_terms.push(ExactTerm::new(term_ty, text)),
TermType::Fuzzy(term_ty) => fuzzy_terms.push(FuzzyTerm::new(term_ty, text)),
TermType::Inverse(term_ty) => inverse_terms.push(InverseTerm::new(term_ty, text)),
}
}
Self {
word_terms,
exact_terms,
fuzzy_terms,
inverse_terms,
}
}
}
impl Query {
    /// Total character count across all fuzzy terms.
    pub fn fuzzy_len(&self) -> usize {
        let mut total = 0;
        for term in &self.fuzzy_terms {
            total += term.len();
        }
        total
    }
}
| true |
861b4e396ffa99ae3a6f10f6c8c7b35057fab2b0
|
Rust
|
tomtau/tendermint-rs
|
/tendermint/src/lite/verifier.rs
|
UTF-8
| 6,294 | 2.875 | 3 |
[
"Apache-2.0"
] |
permissive
|
#[allow(clippy::all)]
use crate::lite::{Commit, Error, Header, Validator, ValidatorSet, ValidatorSetLookup, Vote};
use crate::Time;
use std::time::Duration;
/// Returns an error if the header has expired according to the given
/// trusting_period and current time. If so, the verifier must be reset subjectively.
/// NOTE: this doesn't belong here. It should be called by something that handles whether to trust
/// a verified commit. Verified here is really just about the header/commit/validators. Time is an
/// external concern :)
fn expired<H>(last_header: &H, trusting_period: Duration, now: Time) -> Result<(), Error>
where
    H: Header,
{
    // `duration_since` fails when `now` precedes the header time; that case
    // is silently treated as "not expired" (see the TODO below).
    if let Ok(passed) = now.duration_since(last_header.bft_time()) {
        if passed > trusting_period {
            return Err(Error::Expired);
        }
    }
    // TODO move this out of the verifier and deal with overflows etc (proper err handling)
    Ok(())
}
fn validate_next_vals<H, V>(header: H, next_vals: &V) -> Result<(), Error>
where
H: Header,
V: ValidatorSet,
{
// ensure the next validators in the header matches what was supplied.
if header.next_validators_hash() != next_vals.hash() {
return Err(Error::InvalidNextValidatorSet);
}
Ok(())
}
// Validate the validators and commit against the header.
fn validate_vals_and_commit<H, V, C>(header: H, commit: &C, vals: &V) -> Result<(), Error>
where
H: Header,
V: ValidatorSet,
C: Commit,
{
// ensure the validators in the header matches what we expect from our state.
if header.validators_hash() != vals.hash() {
return Err(Error::InvalidValidatorSet);
}
// ensure the commit matches the header.
if header.hash() != commit.header_hash() {
return Err(Error::InvalidCommitValue);
}
Ok(())
}
/// Verify the commit is valid from the given validators for the header.
///
/// # Errors
///
/// Propagates the first failing structural check, or the voting-power /
/// signature errors from [`verify_commit_full`].
pub fn verify<H, V, C>(header: H, commit: C, validators: V) -> Result<(), Error>
where
    H: Header,
    V: ValidatorSet,
    C: Commit,
{
    // Cheap structural checks first; `?` replaces the old
    // `if let Err(e) { return Err(e) }` re-wrapping.
    validate_vals_and_commit(header, &commit, &validators)?;

    // ensure that +2/3 validators signed correctly
    verify_commit_full(&validators, commit)
}
/// Verify the commit is trusted according to the last validators and is valid
/// from the current validators for the header.
///
/// # Errors
///
/// Fails if fewer than +1/3 of the last trusted validators signed, or if the
/// sequential verification against the current validators fails.
pub fn verify_trusting<H, V, C>(
    header: H,
    commit: C,
    last_validators: V,
    validators: V,
) -> Result<(), Error>
where
    H: Header,
    V: ValidatorSetLookup,
    C: Commit,
{
    // ensure that +1/3 of last trusted validators signed correctly;
    // `?` replaces the old manual `if let Err(e) { return Err(e) }`.
    verify_commit_trusting(&last_validators, &commit)?;

    // perform same verification as in sequential case
    verify(header, commit, validators)
}
/// Verify that +2/3 of the correct validator set signed this commit.
/// NOTE: these validators are expected to be the correct validators for the commit.
fn verify_commit_full<V, C>(vals: &V, commit: C) -> Result<(), Error>
where
    V: ValidatorSet,
    C: Commit,
{
    let total_power = vals.total_power();
    let mut signed_power: u64 = 0;
    let vals_vec = vals.into_vec();
    let commit_vec = commit.into_vec();
    // A full commit carries exactly one (possibly absent) vote slot per
    // validator, so the lengths must agree before zipping.
    if vals_vec.len() != commit_vec.len() {
        return Err(Error::InvalidCommitLength);
    }
    // The vals and commit have a 1-to-1 correspondence.
    // This means we don't need the validator IDs or to do any lookup,
    // we can just zip the iterators.
    let vals_iter = vals_vec.iter();
    let commit_iter = commit_vec.iter();
    for (val, vote_opt) in vals_iter.zip(commit_iter) {
        // skip absent and nil votes
        // NOTE: do we want to check the validity of votes
        // for nil ?
        let vote = match vote_opt {
            Some(v) => v,
            None => continue,
        };
        // check vote is valid from validator
        if !val.verify_signature(vote.sign_bytes(), vote.signature()) {
            return Err(Error::InvalidSignature);
        }
        signed_power += val.power();
    }
    // check the signers account for +2/3 of the voting power
    if signed_power * 3 <= total_power * 2 {
        return Err(Error::InsufficientVotingPower);
    }
    Ok(())
}
/// Verify that +1/3 of the given validator set signed this commit.
/// NOTE the given validators do not necessarily correspond to the validator set for this commit,
/// but there may be some intersection.
/// TODO: this should take a "trust_level" param to allow clients to require more
/// than +1/3. How should this be defined semantically? Probably shouldn't be a float, maybe
/// and enum of options, eg. 1/3, 1/2, 2/3, 1 ?
fn verify_commit_trusting<V, C>(validators: &V, commit: &C) -> Result<(), Error>
where
    V: ValidatorSetLookup,
    C: Commit,
{
    let total_power = validators.total_power();
    let mut signed_power: u64 = 0;
    // NOTE we don't know the validators that committed this block,
    // so we have to check for each vote if its validator is already known.
    let commit_vec = commit.into_vec();
    let commit_iter = commit_vec.iter();
    for vote_opt in commit_iter {
        // skip absent and nil votes
        // NOTE: do we want to check the validity of votes
        // for nil ?
        let vote = match vote_opt {
            Some(v) => v,
            None => continue,
        };
        // check if this vote is from a known validator
        // (unknown validators simply don't count toward the threshold)
        let val_id = vote.validator_id();
        let val = match validators.validator(val_id) {
            Some(v) => v,
            None => continue,
        };
        // check vote is valid from validator
        if !val.verify_signature(vote.sign_bytes(), vote.signature()) {
            return Err(Error::InvalidSignature);
        }
        signed_power += val.power();
    }
    // check the signers account for +1/3 of the voting power
    // TODO: incorporate "trust_level" in here to possibly increase
    // beyond 1/3.
    if signed_power * 3 <= total_power {
        return Err(Error::InsufficientVotingPower);
    }
    Ok(())
}
| true |
fd8bbc6a7e06c1381f6e0676052b05547ed1af9e
|
Rust
|
FlorianRohm/rusty_advent_of_code_2019
|
/intcode/src/lib.rs
|
UTF-8
| 21,778 | 3.109375 | 3 |
[] |
no_license
|
use crate::IntcodeReturnType::CodeError;
use crate::ParamMode::{Immediate, Position};
use crate::ProgramState::{Halted, Interrupted, Running};
use std::convert::{TryFrom, TryInto};
pub mod input;
#[derive(Debug, PartialEq)]
pub enum IntcodeReturnType {
    // Malformed opcode or parameter-mode digit (see `from_memory_location`).
    CodeError,
    // Address out of range, or a value that doesn't fit `usize`.
    IndexError,
    // Program reached opcode 99; carries the final state.
    Finished(IntcodeState),
    // Program paused waiting for input; resume via `resume_with_input`.
    Interrupted(IntcodeState),
}
impl IntcodeReturnType {
    /// Feeds `input` into an interrupted machine and continues execution.
    ///
    /// Panics if `self` is not `Interrupted`.
    pub fn resume_with_input(self, input: i64) -> IntcodeReturnType {
        match self {
            IntcodeReturnType::Interrupted(mut state) => {
                state.input = input;
                state.resume = true;
                complete_intcode(state)
            }
            other => panic!("resume only on Interrupted state, used on {:?}", other),
        }
    }
}
#[derive(Debug, PartialEq, Default)]
pub struct IntcodeState {
    pub code: Memory,
    // Instruction pointer into `code`.
    index: usize,
    // Value consumed by the next input opcode (opcode 3).
    pub input: i64,
    // Values emitted by output opcodes (opcode 4).
    pub output: Vec<i64>,
    // When true, the next input opcode executes instead of interrupting.
    resume: bool,
}
// Coarse classification of a decoded opcode word.
enum ProgramState {
    // Executes without pausing the machine.
    Running(OpMode),
    // Opcode 99: stop execution.
    Halted,
    // Needs external input before it can run (opcode 3).
    Interrupted(OpMode),
}
// How an instruction parameter is resolved.
enum ParamMode {
    // Parameter is an address into `code`.
    Position,
    // Parameter is the literal value.
    Immediate,
}
// Decoded operation plus the parameter modes of its read operands.
enum OpMode {
    Add(ParamMode, ParamMode),
    Mul(ParamMode, ParamMode),
    Input,
    Output(ParamMode),
    JumpIfTrue(ParamMode, ParamMode),
    JumpIfFalse(ParamMode, ParamMode),
    LessThan(ParamMode, ParamMode),
    Equals(ParamMode, ParamMode),
}
// Program memory: a flat vector of 64-bit words.
pub type Memory = Vec<i64>;
// `Ok` carries the state to keep stepping; `Err` carries a final outcome.
pub type IntcodeResult = std::result::Result<IntcodeState, IntcodeReturnType>;
impl IntcodeState {
    /// Fresh machine state running `code` from index 0.
    pub fn from(code: Memory) -> IntcodeState {
        let mut state = IntcodeState::default();
        state.code = code;
        state
    }

    /// Fresh machine state with `input` pre-queued for the first input
    /// opcode (`resume` is set so execution does not pause on it).
    pub fn with_next_input(code: Memory, input: i64) -> IntcodeState {
        let mut state = IntcodeState::default();
        state.code = code;
        state.input = input;
        state.resume = true;
        state
    }

    /// Queues `input` on an existing state and marks it resumable.
    pub fn set_next_input(mut self, input: i64) -> IntcodeState {
        self.input = input;
        self.resume = true;
        self
    }
}
impl TryFrom<usize> for ParamMode {
    type Error = IntcodeReturnType;

    /// Decodes a single mode digit: 0 = position, 1 = immediate.
    fn try_from(value: usize) -> Result<Self, Self::Error> {
        if value == 0 {
            Ok(Position)
        } else if value == 1 {
            Ok(Immediate)
        } else {
            Err(CodeError)
        }
    }
}
// Fallible conversion to `usize` with an Intcode-specific error type.
trait TryToUsize {
    // Returns `IndexError` when the value does not fit in `usize`.
    fn to_usize(&self) -> Result<usize, IntcodeReturnType>;
}
impl TryToUsize for i64 {
    /// Converts to `usize`; negative values map to `IndexError`.
    fn to_usize(&self) -> Result<usize, IntcodeReturnType> {
        // `i64` is `Copy`, so a plain dereference replaces the old `clone()`.
        (*self)
            .try_into()
            .map_err(|_| IntcodeReturnType::IndexError)
    }
}
impl ProgramState {
    // Decodes an opcode word: the last two decimal digits select the
    // operation, the remaining digits (right to left) give parameter modes.
    fn from_memory_location(input: i64) -> Result<Self, IntcodeReturnType> {
        use OpMode::*;
        // Opcode words have at most 5 digits (3 mode digits + 2 op digits).
        assert!(input <= 99999);
        // A negative word fails the conversion and is reported as CodeError.
        let mut n: usize = input.try_into().map_err(|_| IntcodeReturnType::CodeError)?;
        let op_mode = n % 100;
        n /= 100;
        let first_param = ParamMode::try_from(n % 10)?;
        n /= 10;
        let second_param = ParamMode::try_from(n % 10)?;
        n /= 10;
        // Parsed only to validate the digit; the value itself is unused.
        let _third_param = ParamMode::try_from(n % 10)?;
        match op_mode {
            1 => Ok(Running(Add(first_param, second_param))),
            2 => Ok(Running(Mul(first_param, second_param))),
            3 => Ok(Interrupted(Input)),
            4 => Ok(Running(Output(first_param))),
            5 => Ok(Running(JumpIfTrue(first_param, second_param))),
            6 => Ok(Running(JumpIfFalse(first_param, second_param))),
            7 => Ok(Running(LessThan(first_param, second_param))),
            8 => Ok(Running(Equals(first_param, second_param))),
            99 => Ok(Halted),
            _ => Err(IntcodeReturnType::CodeError),
        }
    }
}
/// Runs `memory` from index 0 with no input queued; an input opcode will
/// interrupt the machine.
pub fn run_instruction_set(memory: Memory) -> IntcodeReturnType {
    complete_intcode(IntcodeState::from(memory))
}
/// Runs `memory` from index 0 with `input` pre-queued for the first input
/// instruction.
pub fn run_instruction_set_with_input(memory: Memory, input: i64) -> IntcodeReturnType {
    complete_intcode(IntcodeState::with_next_input(memory, input))
}
/// Steps the machine until it finishes, errors, or interrupts for input;
/// the terminal condition travels on `intcode_step`'s `Err` channel.
fn complete_intcode(mut intcode_state: IntcodeState) -> IntcodeReturnType {
    loop {
        intcode_state = match intcode_step(intcode_state) {
            Ok(next) => next,
            Err(terminal) => break terminal,
        };
    }
}
/// Decodes and executes the single instruction at the current index.
///
/// Returns `Ok(state)` to continue, or `Err` carrying `Finished` on opcode 99,
/// `Interrupted` when an input opcode has no queued input, or an error variant
/// on malformed programs.
fn intcode_step(mut intcode_state: IntcodeState) -> IntcodeResult {
    let index = intcode_state.index;
    let instruction_field = get_index_value(&intcode_state.code, index)?;
    let op_mode = match ProgramState::from_memory_location(instruction_field)? {
        Running(op_mode) => op_mode,
        Halted => return Err(IntcodeReturnType::Finished(intcode_state)),
        Interrupted(op_mode) => {
            // Consume the queued input (one-shot): clear the flag and execute.
            if intcode_state.resume {
                intcode_state.resume = false;
                op_mode
            } else {
                return Err(IntcodeReturnType::Interrupted(intcode_state));
            }
        }
    };
    let new_state = process_op_mode(intcode_state, op_mode)?;
    Ok(new_state)
}
/// Executes one decoded instruction and returns the advanced state.
///
/// Instruction widths: add/mul/less-than/equals = 4 cells, input/output = 2,
/// jumps = 3 (unless taken, in which case the target replaces the index).
fn process_op_mode(mut intcode_state: IntcodeState, op_mode: OpMode) -> IntcodeResult {
    let index = intcode_state.index;
    let new_state = match op_mode {
        OpMode::Add(mode_1, mode_2) => {
            op_modes_3_inputs(intcode_state, mode_1, mode_2, |a, b| a + b)?
        }
        OpMode::Mul(mode_1, mode_2) => {
            op_modes_3_inputs(intcode_state, mode_1, mode_2, |a, b| a * b)?
        }
        OpMode::Input => {
            // Write the queued input to the address held in the next cell.
            intcode_state.code =
                try_set_at_index_location(intcode_state.code, index + 1, intcode_state.input)?;
            intcode_state.index += 2;
            intcode_state
        }
        OpMode::Output(mode) => {
            let output = get_value_at_index_location(&intcode_state.code, index + 1, &mode)?;
            intcode_state.output.push(output);
            intcode_state.index += 2;
            intcode_state
        }
        OpMode::JumpIfTrue(mode_1, mode_2) => {
            match get_value_at_index_location(&intcode_state.code, index + 1, &mode_1)? {
                0 => intcode_state.index += 3,
                _ => {
                    let target =
                        get_value_at_index_location(&intcode_state.code, index + 2, &mode_2)?;
                    intcode_state.index = target.to_usize()?;
                }
            };
            intcode_state
        }
        OpMode::JumpIfFalse(mode_1, mode_2) => {
            match get_value_at_index_location(&intcode_state.code, index + 1, &mode_1)? {
                0 => {
                    let target =
                        get_value_at_index_location(&intcode_state.code, index + 2, &mode_2)?;
                    intcode_state.index = target.to_usize()?;
                }
                _ => intcode_state.index += 3,
            };
            intcode_state
        }
        // Comparison opcodes store 1/0 like a 3-input arithmetic op.
        OpMode::LessThan(mode_1, mode_2) => op_modes_3_inputs(
            intcode_state,
            mode_1,
            mode_2,
            |a, b| if a < b { 1 } else { 0 },
        )?,
        OpMode::Equals(mode_1, mode_2) => op_modes_3_inputs(
            intcode_state,
            mode_1,
            mode_2,
            |a, b| if a == b { 1 } else { 0 },
        )?,
    };
    Ok(new_state)
}
/// Shared body for 4-cell instructions `[op, in1, in2, dest]`: reads both
/// operands per their modes, stores `operation(in1, in2)` at the address in
/// the `dest` cell, and advances the index past the instruction.
fn op_modes_3_inputs(
    mut intcode_state: IntcodeState,
    mode_1: ParamMode,
    mode_2: ParamMode,
    operation: impl Fn(i64, i64) -> i64,
) -> IntcodeResult {
    let index = intcode_state.index;
    let operand_1 = get_value_at_index_location(&intcode_state.code, index + 1, &mode_1)?;
    let operand_2 = get_value_at_index_location(&intcode_state.code, index + 2, &mode_2)?;
    intcode_state.code = try_set_at_index_location(
        intcode_state.code,
        index + 3,
        operation(operand_1, operand_2),
    )?;
    intcode_state.index += 4;
    Ok(intcode_state)
}
/// Bounds-checked read of `code[index]`; out-of-range -> `IndexError`.
fn get_index_value(code: &Memory, index: usize) -> Result<i64, IntcodeReturnType> {
    match code.get(index) {
        Some(value) => Ok(*value),
        None => Err(IntcodeReturnType::IndexError),
    }
}
/// Resolves the operand in `code[index]` according to `mode`.
///
/// * `Immediate` — the cell holds the value itself.
/// * `Position`  — the cell holds an address; the value is read from there.
///   Negative or out-of-range addresses yield `IndexError`.
fn get_value_at_index_location(
    code: &Memory,
    index: usize,
    mode: &ParamMode,
) -> Result<i64, IntcodeReturnType> {
    let index_value = get_index_value(code, index)?;
    match mode {
        // `index_value` already is i64; the original `as i64` cast was a no-op.
        Immediate => Ok(index_value),
        Position => {
            let i: usize = index_value.to_usize()?;
            // `.copied()` replaces the original `.to_owned()` on a &i64.
            code.get(i).copied().ok_or(IntcodeReturnType::IndexError)
        }
    }
}
/// Stores `value` at the address held in `code[index]` (write targets are
/// always position-mode). Fails with `IndexError` if either `index` or the
/// target address is out of range, or the address is negative.
fn try_set_at_index_location(
    mut code: Memory,
    index: usize,
    value: i64,
) -> Result<Memory, IntcodeReturnType> {
    let target_index: usize = code
        .get(index)
        .ok_or(IntcodeReturnType::IndexError)?
        .to_usize()?;
    // Single bounds-checked mutable access replaces the original
    // `get(..)` check followed by an unchecked `code[target_index]` write.
    *code
        .get_mut(target_index)
        .ok_or(IntcodeReturnType::IndexError)? = value;
    Ok(code)
}
#[cfg(test)]
mod tests {
use super::*;
impl IntcodeState {
fn from_all(code: Memory, index: usize, input: i64, output: Vec<i64>) -> IntcodeState {
IntcodeState {
code,
index,
input,
output,
resume: false,
}
}
}
mod test_step {
use super::*;
#[test]
fn test_intcode_step_add() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1, 0, 0, 0])),
Ok(IntcodeState::from_all(vec![2, 0, 0, 0], 4, 0, vec![]))
);
}
#[test]
fn test_intcode_step_mul() {
assert_eq!(
intcode_step(IntcodeState::from(vec![2, 0, 0, 0])),
Ok(IntcodeState::from_all(vec![4, 0, 0, 0], 4, 0, vec![]))
);
}
#[test]
fn test_intcode_step_add_2() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1, 0, 0, 3])),
Ok(IntcodeState::from_all(vec![1, 0, 0, 2], 4, 0, vec![]))
);
}
#[test]
fn test_intcode_step_mul_2() {
assert_eq!(
intcode_step(IntcodeState::from(vec![2, 0, 0, 2])),
Ok(IntcodeState::from_all(vec![2, 0, 4, 2], 4, 0, vec![]))
);
}
#[test]
fn test_intcode_step_err_index_1() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1, 5, 0, 1])),
Err(IntcodeReturnType::IndexError)
);
}
#[test]
fn test_intcode_step_err_index_2() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1, 0, 5, 1])),
Err(IntcodeReturnType::IndexError)
);
}
#[test]
fn test_intcode_step_err_index_3() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1, 0, 0, 5])),
Err(IntcodeReturnType::IndexError)
);
}
#[test]
fn test_intcode_return() {
assert_eq!(
intcode_step(IntcodeState::from(vec![99, 0, 0, 5])),
Err(IntcodeReturnType::Finished(IntcodeState::from_all(
vec![99, 0, 0, 5],
0,
0,
vec![],
)))
);
}
#[test]
fn test_intcode_step_input() {
assert_eq!(
intcode_step(IntcodeState::with_next_input(vec![3, 0], 5)),
Ok(IntcodeState::from_all(vec![5, 0], 2, 5, vec![]))
);
}
#[test]
fn test_intcode_step_input_continue() {
let mut intcode = IntcodeState::with_next_input(vec![3, 0], 5);
intcode.resume = false;
assert_eq!(
intcode_step(intcode),
Err(IntcodeReturnType::Interrupted(IntcodeState::from_all(
vec![3, 0],
0,
5,
vec![]
)))
);
}
#[test]
fn test_intcode_step_output() {
assert_eq!(
intcode_step(IntcodeState::from(vec![4, 1])),
Ok(IntcodeState::from_all(vec![4, 1], 2, 0, vec![1]))
);
assert_eq!(
intcode_step(IntcodeState::from(vec![4, 0])),
Ok(IntcodeState::from_all(vec![4, 0], 2, 0, vec![4]))
);
}
#[test]
fn test_intcode_step_parameter_mode_mul() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1002, 4, 3, 4, 33])),
Ok(IntcodeState::from_all(
vec![1002, 4, 3, 4, 99],
4,
0,
vec![]
))
);
}
#[test]
fn test_intcode_step_negative() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1101, 100, -1, 4, 0])),
Ok(IntcodeState::from_all(
vec![1101, 100, -1, 4, 99],
4,
0,
vec![]
))
);
}
#[test]
fn test_intcode_step_parameter_mode_add() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1101, 4, 3, 4, 33])),
Ok(IntcodeState::from_all(vec![1101, 4, 3, 4, 7], 4, 0, vec![]))
);
}
#[test]
fn test_intcode_step_jump_if_not_0_ok() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1105, 1, 5, 4, 33])),
Ok(IntcodeState::from_all(
vec![1105, 1, 5, 4, 33],
5,
0,
vec![]
))
);
}
#[test]
fn test_intcode_step_jump_if_not_0_not() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1105, 0, 3, 4, 33])),
Ok(IntcodeState::from_all(
vec![1105, 0, 3, 4, 33],
3,
0,
vec![]
))
);
}
#[test]
fn test_intcode_step_jump_if_0_not() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1106, 1, 3, 4, 33])),
Ok(IntcodeState::from_all(
vec![1106, 1, 3, 4, 33],
3,
0,
vec![]
))
);
}
#[test]
fn test_intcode_step_jump_if_0_ok() {
assert_eq!(
intcode_step(IntcodeState::from(vec![1106, 0, 5, 4, 33])),
Ok(IntcodeState::from_all(
vec![1106, 0, 5, 4, 33],
5,
0,
vec![]
))
);
}
#[test]
fn test_intcode_step_parameter_mode_out() {
assert_eq!(
intcode_step(IntcodeState::from(vec![104, 55, 3, 4, 33])),
Ok(IntcodeState::from_all(
vec![104, 55, 3, 4, 33],
2,
0,
vec![55]
))
);
}
}
mod test_complete {
use super::*;
#[test]
fn test_intcode_index_error_1() {
assert_eq!(
complete_intcode(IntcodeState::from(vec![1, 0, 0, 0])),
IntcodeReturnType::IndexError
);
}
#[test]
fn test_intcode_index_error_2() {
assert_eq!(
complete_intcode(IntcodeState::from(vec![1, 0, 0, 0, 1, 34, 4, 5])),
IntcodeReturnType::IndexError
);
}
#[test]
fn test_intcode_website() {
assert_eq!(
complete_intcode(IntcodeState::from(vec![
1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50
])),
IntcodeReturnType::Finished(IntcodeState::from_all(
vec![3500, 9, 10, 70, 2, 3, 11, 0, 99, 30, 40, 50],
8,
0,
vec![],
))
);
assert_eq!(
complete_intcode(IntcodeState::from(vec![1, 0, 0, 0, 99])),
IntcodeReturnType::Finished(IntcodeState::from_all(
vec![2, 0, 0, 0, 99],
4,
0,
vec![],
))
);
assert_eq!(
complete_intcode(IntcodeState::from(vec![2, 3, 0, 3, 99])),
IntcodeReturnType::Finished(IntcodeState::from_all(
vec![2, 3, 0, 6, 99],
4,
0,
vec![],
))
);
assert_eq!(
complete_intcode(IntcodeState::from(vec![2, 4, 4, 5, 99, 0])),
IntcodeReturnType::Finished(IntcodeState::from_all(
vec![2, 4, 4, 5, 99, 9801],
4,
0,
vec![],
))
);
assert_eq!(
complete_intcode(IntcodeState::from(vec![1, 1, 1, 4, 99, 5, 6, 0, 99])),
IntcodeReturnType::Finished(IntcodeState::from_all(
vec![30, 1, 1, 4, 2, 5, 6, 0, 99],
8,
0,
vec![],
))
);
}
#[test]
fn test_intcodes_day5_equals() {
// Using position mode, consider whether the input is equal to 8; output 1 (if it is) or 0 (if it is not)
let input_equal_8 = || vec![3, 9, 8, 9, 10, 9, 4, 9, 99, -1, 8];
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_equal_8(), 8)),
vec![1],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_equal_8(), 9)),
vec![0],
);
// Using position mode, consider whether the input is less than 8; output 1 (if it is) or 0 (if it is not).
let input_less_than_8 = || vec![3, 9, 7, 9, 10, 9, 4, 9, 99, -1, 8];
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_less_than_8(), 9)),
vec![0],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_less_than_8(), 5)),
vec![1],
);
// Using immediate mode, consider whether the input is equal to 8; output 1 (if it is) or 0 (if it is not).
let input_equal_8 = || vec![3, 3, 1108, -1, 8, 3, 4, 3, 99];
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_equal_8(), 8)),
vec![1],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_equal_8(), 9)),
vec![0],
);
// Using immediate mode, consider whether the input is less than 8; output 1 (if it is) or 0 (if it is not).
let input_less_than_8 = || vec![3, 3, 1107, -1, 8, 3, 4, 3, 99];
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_less_than_8(), 9)),
vec![0],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_less_than_8(), 5)),
vec![1],
);
}
#[test]
fn test_intcodes_day5_jumps() {
// Here are some jump tests that take an input, then output 0 if the input was zero or 1 if the input was non-zero:
let input = || vec![3, 12, 6, 12, 15, 1, 13, 14, 13, 4, 13, 99, -1, 0, 1, 9];
let input_2 = || vec![3, 3, 1105, -1, 9, 1101, 0, 0, 12, 4, 12, 99, 1];
test_for_output(
complete_intcode(IntcodeState::with_next_input(input(), 0)),
vec![0],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_2(), 0)),
vec![0],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input(), 5)),
vec![1],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input_2(), 7)),
vec![1],
);
}
#[test]
fn test_intcodes_day5_big() {
// Here are some jump tests that take an input, then output 0 if the input was zero or 1 if the input was non-zero:
let input = || {
vec![
3, 21, 1008, 21, 8, 20, 1005, 20, 22, 107, 8, 21, 20, 1006, 20, 31, 1106, 0,
36, 98, 0, 0, 1002, 21, 125, 20, 4, 20, 1105, 1, 46, 104, 999, 1105, 1, 46,
1101, 1000, 1, 20, 4, 20, 1105, 1, 46, 98, 99,
]
};
test_for_output(
complete_intcode(IntcodeState::with_next_input(input(), 0)),
vec![999],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input(), 8)),
vec![1000],
);
test_for_output(
complete_intcode(IntcodeState::with_next_input(input(), 9)),
vec![1001],
);
}
fn test_for_output(return_type: IntcodeReturnType, output: Vec<i64>) {
if let IntcodeReturnType::Finished(state) = return_type {
assert_eq!(state.output, output)
} else {
assert!(false, format!("wrong enum variant {:?}", return_type))
}
}
}
}
| true |
9eed5be60420f64014d306d42c25cfc07e2621f7
|
Rust
|
jjmark15/rust_trait_fun
|
/src/super_traits.rs
|
UTF-8
| 1,635 | 3.734375 | 4 |
[] |
no_license
|
/// Anything with a readable name.
trait Person {
    fn name(&self) -> &String;
}
/// A person who lives in the house; supertrait demo (`Housemate: Person`).
trait Housemate: Person {
    fn room_number(&self) -> u32;
}
/// Someone who programs; default favourite language is Rust.
trait Programmer {
    fn fave_language(&self) -> String {
        "Rust".to_owned()
    }
}
/// Marker trait combining both supertraits for trait-object use.
trait ProgrammerHousemate: Housemate + Programmer {}
/// Builds a greeting from a trait object, exercising methods from all three
/// supertraits.
#[allow(dead_code)]
fn programmer_housemate_greeter(housemate: &dyn ProgrammerHousemate) -> String {
    format!(
        "Hi, my name is {}, I live in room {} and I love {}!",
        housemate.name(),
        housemate.room_number(),
        housemate.fave_language()
    )
}
/// Concrete implementor of `ProgrammerHousemate`.
struct GraduateProgrammer {
    name: String,
    favourite_language: String,
}
impl GraduateProgrammer {
    /// Constructor; only used from tests, hence `allow(dead_code)`.
    #[allow(dead_code)]
    fn new(name: String, fave_lang: String) -> GraduateProgrammer {
        GraduateProgrammer {
            name,
            favourite_language: fave_lang,
        }
    }
}
// Marker impl: GraduateProgrammer satisfies both supertraits below.
impl ProgrammerHousemate for GraduateProgrammer {}
impl Housemate for GraduateProgrammer {
    /// Room number is hard-coded in this demo.
    fn room_number(&self) -> u32 {
        1
    }
}
impl Person for GraduateProgrammer {
    fn name(&self) -> &String {
        &self.name
    }
}
impl Programmer for GraduateProgrammer {
    /// Returns the stored favourite language.
    fn fave_language(&self) -> String {
        // The original round-tripped through `str::parse::<String>()` (whose
        // FromStr impl is infallible) plus `unwrap()`; a plain clone is the
        // equivalent, panic-free spelling.
        self.favourite_language.clone()
    }
}
#[cfg(test)]
mod tests {
use crate::super_traits::{programmer_housemate_greeter, GraduateProgrammer};
#[test]
fn test_programmer_housemate_greeter() {
let josh = GraduateProgrammer::new("Josh".to_string(), "Rust".to_owned());
assert_eq!(
"Hi, my name is Josh, I live in room 1 and I love Rust!",
programmer_housemate_greeter(&josh)
)
}
}
| true |
1e1e21ed0ec36b7890d40e3a72a8939c3ba984e8
|
Rust
|
tiger1710/didactic-octo-funicular
|
/source/rust-code/acmicpc/10900/10989.rs
|
UTF-8
| 657 | 3.078125 | 3 |
[] |
no_license
|
use std::io;
use std::io::Write;
fn main() {
    // Counting sort: inputs are known to fit in 1..=10000, so tally
    // occurrences instead of storing and sorting every number.
    let mut input = String::new();
    io::stdin().read_line(&mut input).unwrap(); // number of values
    let n = input.trim().parse::<usize>().unwrap();
    let mut counts: [usize; 10000] = [0; 10000];
    for _ in 0..n {
        input.clear();
        io::stdin().read_line(&mut input).unwrap(); // one value per line
        counts[input.trim().parse::<usize>().unwrap() - 1] += 1;
    }
    // Large write buffer so output is not one syscall per line.
    let mut output = io::BufWriter::with_capacity(4096000, io::stdout());
    for (i, &count) in counts.iter().enumerate() {
        for _ in 0..count {
            writeln!(output, "{}", i + 1).unwrap();
        }
    }
}
| true |
66f99f2e50bdee69baf56f21a0880f291933f54e
|
Rust
|
rapodaca/chemcore
|
/src/daylight/smiles/read/pi_subgraph.rs
|
UTF-8
| 4,455 | 2.75 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use gamma::graph::{DefaultGraph, Graph};
use purr::graph::{Atom, Bond};
use purr::parts::BondKind;
/// Builds the pi subgraph of a SMILES molecule: the graph of aromatic atoms
/// with a nonzero subvalence, connected by their elided/aromatic bonds.
/// Node ids in the result are indices into `atoms`.
pub fn pi_subgraph(atoms: &Vec<Atom>) -> DefaultGraph {
    let mut result = DefaultGraph::new();
    let mut subvalences = vec![];
    // Pass 1: seed nodes for aromatic atoms that can contribute a pi electron,
    // and cache every atom's subvalence for the edge pass.
    for (index, atom) in atoms.iter().enumerate() {
        let subvalence = atom.subvalence();
        if atom.is_aromatic() && subvalence > 0 {
            result.add_node(index).expect("add node");
        }
        subvalences.push(subvalence);
    }
    // Pass 2: add edges. `*tid < sid` skips each bond's mirror entry so every
    // undirected bond is considered exactly once.
    for (sid, source) in atoms.iter().enumerate() {
        for Bond { tid, kind } in source.bonds.iter() {
            if *tid < sid {
                continue;
            }
            match kind {
                // Elided bonds only join atoms already admitted in pass 1.
                BondKind::Elided => {
                    if result.has_id(sid) && result.has_id(*tid) {
                        result.add_edge(sid, *tid).expect("add edge")
                    }
                }
                // Explicit aromatic bonds may admit atoms pass 1 skipped
                // (e.g. non-aromatic atoms); an edge is only added when both
                // ends have subvalence to contribute.
                BondKind::Aromatic => {
                    if subvalences[sid] > 0 {
                        if !result.has_id(sid) {
                            result.add_node(sid).expect("add source");
                        }
                        if subvalences[*tid] > 0 {
                            if !result.has_id(*tid) {
                                result.add_node(*tid).expect("add target");
                            }
                            result.add_edge(sid, *tid).expect("add edge")
                        }
                    } else if subvalences[*tid] > 0 {
                        // Source cannot participate but target can: node only.
                        if !result.has_id(*tid) {
                            result.add_node(*tid).expect("add target");
                        }
                    }
                }
                // Single/double/etc. bonds never join the pi system here.
                _ => (),
            }
        }
    }
    result
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use pretty_assertions::assert_eq;
use purr::graph::from_tree;
use purr::read::read;
use super::*;
#[test]
fn methane() {
let atoms = from_tree(read("C").unwrap().root).unwrap();
assert_eq!(pi_subgraph(&atoms), DefaultGraph::new())
}
#[test]
fn methane_aromatic() {
let atoms = from_tree(read("c").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![vec![]]).unwrap()
)
}
#[test]
fn ethane() {
let atoms = from_tree(read("CC").unwrap().root).unwrap();
assert_eq!(pi_subgraph(&atoms), DefaultGraph::new())
}
#[test]
fn ethene_aromatic_atoms() {
let atoms = from_tree(read("cc").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![vec![1], vec![0]]).unwrap()
)
}
#[test]
fn propene_aromatic_atoms() {
let atoms = from_tree(read("ccC").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![vec![1], vec![0]]).unwrap()
)
}
#[test]
fn carbon_iron_aromatic() {
let atoms = from_tree(read("C:[Fe]").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![vec![]]).unwrap()
)
}
#[test]
fn iron_carbon_aromatic() {
let atoms = from_tree(read("[Fe]:C").unwrap().root).unwrap();
let mut result = DefaultGraph::new();
result.add_node(1).unwrap();
assert_eq!(pi_subgraph(&atoms), result)
}
#[test]
fn furan_all_aromatic() {
let atoms = from_tree(read("c1ccco1").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![(0, 1), (1, 2), (2, 3)]).unwrap()
)
}
#[test]
fn pyrrole_all_aromatic() {
let atoms = from_tree(read("c1ccc[nH]1").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![(0, 1), (1, 2), (2, 3)]).unwrap()
)
}
#[test]
fn benzene_all_aromatic() {
let atoms = from_tree(read("c1ccccc1").unwrap().root).unwrap();
assert_eq!(
pi_subgraph(&atoms),
DefaultGraph::try_from(vec![
vec![5, 1],
vec![0, 2],
vec![1, 3],
vec![2, 4],
vec![3, 5],
vec![0, 4]
])
.unwrap()
)
}
}
| true |
7a238aa80d4d271d817dd5c49d9112b8d34d9d8b
|
Rust
|
cleech/adventofcode
|
/src/day05/mod.rs
|
UTF-8
| 2,247 | 3.21875 | 3 |
[
"MIT"
] |
permissive
|
// Puzzle input, embedded at compile time.
const DATA: &'static str = include_str!("input.txt");
/// Returns both AoC 2015 day 5 answers as strings (part 1, part 2).
pub fn main() -> Vec<String> {
    let s1 = nice_count(DATA);
    let s2 = nicer_count(DATA);
    vec![s1.to_string(), s2.to_string()]
}
/// Part-1 "nice" test: at least three vowels, a doubled letter, and none of
/// the forbidden digraphs ab/cd/pq/xy.
fn nice(s: &str) -> bool {
    // Rule 1: three or more vowels anywhere in the string.
    let vowel_count = s.chars().filter(|c| "aeiou".contains(*c)).count();
    if vowel_count < 3 {
        return false;
    }
    let chars: Vec<char> = s.chars().collect();
    // Rule 2: some letter appears twice in a row.
    let has_double = chars.windows(2).any(|pair| pair[0] == pair[1]);
    // Rule 3: none of the forbidden two-letter substrings may appear.
    let forbidden = ["ab", "cd", "pq", "xy"];
    let has_forbidden = chars.windows(2).any(|pair| {
        let digraph: String = pair.iter().collect();
        forbidden.contains(&digraph.as_str())
    });
    has_double && !has_forbidden
}
/// Counts the lines of `input` that satisfy the part-1 `nice` rules.
fn nice_count(input: &str) -> usize {
    input.lines()
        .filter(|line| nice(line))
        .count()
}
/// Part-2 "nice" test: a letter pair that appears twice without overlapping,
/// and a letter that repeats with exactly one letter between ("xyx").
fn nicer(s: &str) -> bool {
    let chars: Vec<char> = s.chars().collect();
    // Rule 1: pair at (n, n+1) must reappear starting at n+2 or later.
    // `saturating_sub` fixes the original `bs.len() - 3`, which underflowed
    // (panicking in debug builds) for strings shorter than three characters.
    let has_double_pairs = (0..chars.len().saturating_sub(3)).any(|n| {
        chars[n + 2..]
            .windows(2)
            .any(|w| w[0] == chars[n] && w[1] == chars[n + 1])
    });
    // Rule 2: any three-character window whose ends match.
    let has_sandwich = chars.windows(3).any(|w| w[0] == w[2]);
    has_double_pairs && has_sandwich
}
/// Counts the lines of `input` that satisfy the part-2 `nicer` rules.
fn nicer_count(input: &str) -> usize {
    input.lines()
        .filter(|line| nicer(line))
        .count()
}
#[cfg(test)]
mod test {
use super::{nice, nicer};
#[test]
fn examples_1() {
assert_eq!(nice("ugknbfddgicrmopn"), true);
assert_eq!(nice("aaa"), true);
assert_eq!(nice("jchzalrnumimnmhp"), false);
assert_eq!(nice("haegwjzuvuyypxyu"), false);
assert_eq!(nice("dvszwmarrgswjxmb"), false);
}
#[test]
fn examples_2() {
assert_eq!(nicer("qjhvhtzxzqqjkmpb"), true);
assert_eq!(nicer("xxyxx"), true);
assert_eq!(nicer("uurcxstgmygtbstg"), false);
assert_eq!(nicer("eodomkazucvgmuy"), false);
}
}
| true |
4eb8a48a85175b82c2845f2855ae9d384848266b
|
Rust
|
zezic/nona
|
/src/math.rs
|
UTF-8
| 6,749 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use std::ops::{Mul, MulAssign};
/// A 2-D point (also used as a 2-D vector by the math below).
#[derive(Debug, Copy, Clone, Default)]
pub struct Point {
    pub x: f32,
    pub y: f32,
}
impl Point {
    /// Creates a point from its coordinates.
    pub fn new(x: f32, y: f32) -> Point {
        Point { x, y }
    }
    /// True if `pt` lies within distance `tol` of `self` (compared in
    /// squared space to avoid a sqrt).
    pub(crate) fn equals(self, pt: Point, tol: f32) -> bool {
        let dx = pt.x - self.x;
        let dy = pt.y - self.y;
        dx * dx + dy * dy < tol * tol
    }
    /// Returns the *squared* distance from `self` to the segment `p`-`q`.
    pub(crate) fn dist_pt_seg(self, p: Point, q: Point) -> f32 {
        let pqx = q.x - p.x;
        let pqy = q.y - p.y;
        let dx = self.x - p.x;
        let dy = self.y - p.y;
        // Project self onto the segment; clamp t to [0, 1] so the nearest
        // point stays on the segment rather than the infinite line.
        let d = pqx * pqx + pqy * pqy;
        let mut t = pqx * dx + pqy * dy;
        if d > 0.0 {
            t /= d;
        }
        if t < 0.0 {
            t = 0.0
        } else if t > 1.0 {
            t = 1.0
        };
        let dx = p.x + t * pqx - self.x;
        let dy = p.y + t * pqy - self.y;
        dx * dx + dy * dy
    }
    /// Scales the vector to unit length in place and returns its previous
    /// length; near-zero vectors (< 1e-6) are left unchanged.
    pub(crate) fn normalize(&mut self) -> f32 {
        let d = ((self.x) * (self.x) + (self.y) * (self.y)).sqrt();
        if d > 1e-6 {
            let id = 1.0 / d;
            self.x *= id;
            self.y *= id;
        }
        d
    }
    /// 2-D cross product magnitude. NOTE(review): this computes pt2 x pt1,
    /// i.e. the arguments are used in the opposite of the conventional
    /// order — confirm against call sites before relying on the sign.
    pub(crate) fn cross(pt1: Point, pt2: Point) -> f32 {
        pt2.x * pt1.y - pt1.x * pt2.y
    }
    /// Returns a copy translated by `(tx, ty)`.
    pub fn offset(&self, tx: f32, ty: f32) -> Point {
        Point::new(self.x + tx, self.y + ty)
    }
}
// Tuple conversions so call sites can write `(x, y).into()`.
impl From<(f32, f32)> for Point {
    fn from((x, y): (f32, f32)) -> Self {
        Point::new(x, y)
    }
}
impl From<(i32, i32)> for Point {
    fn from((x, y): (i32, i32)) -> Self {
        Point::new(x as f32, y as f32)
    }
}
/// A 2-D size (width x height).
#[derive(Debug, Copy, Clone, Default)]
pub struct Extent {
    pub width: f32,
    pub height: f32,
}
impl Extent {
    /// Creates a size from width and height.
    pub fn new(width: f32, height: f32) -> Extent {
        Extent { width, height }
    }
}
impl From<(f32, f32)> for Extent {
    fn from((width, height): (f32, f32)) -> Self {
        Extent::new(width, height)
    }
}
/// An axis-aligned rectangle: top-left corner plus size.
#[derive(Debug, Copy, Clone, Default)]
pub struct Rect {
    pub xy: Point,
    pub size: Extent,
}
impl Rect {
    /// Creates a rectangle from its corner and size.
    pub fn new(xy: Point, size: Extent) -> Rect {
        Rect { xy, size }
    }
    /// Returns the intersection of `self` and `rect`; if they do not
    /// overlap, the result is clamped to zero width/height.
    pub fn intersect(self, rect: Rect) -> Rect {
        // BUG FIX: the original destructured `rect` into BOTH the a- and
        // b-sides, so `self` was ignored and the function always returned
        // `rect` intersected with itself. The a-side must come from `self`.
        let Rect {
            xy: Point { x: ax, y: ay },
            size: Extent {
                width: aw,
                height: ah,
            },
        } = self;
        let Rect {
            xy: Point { x: bx, y: by },
            size: Extent {
                width: bw,
                height: bh,
            },
        } = rect;
        let minx = ax.max(bx);
        let miny = ay.max(by);
        let maxx = (ax + aw).min(bx + bw);
        let maxy = (ay + ah).min(by + bh);
        Self::new(
            Point::new(minx, miny),
            Extent::new((maxx - minx).max(0.0), (maxy - miny).max(0.0)),
        )
    }
    /// Returns a rectangle grown by `width`/`height` in total, keeping the
    /// same center (each edge moves outward by half the amount).
    pub fn grow(&self, width: f32, height: f32) -> Rect {
        Rect::new(
            self.xy.offset(-width / 2.0, -height / 2.0),
            Extent::new(self.size.width + width, self.size.height + height),
        )
    }
}
// `(x, y, w, h).into()` convenience conversion.
impl From<(f32, f32, f32, f32)> for Rect {
    fn from((x, y, w, h): (f32, f32, f32, f32)) -> Self {
        Rect::new((x, y).into(), (w, h).into())
    }
}
/// An axis-aligned bounding box expressed as min/max corners.
#[derive(Debug, Copy, Clone, Default)]
pub struct Bounds {
    pub min: Point,
    pub max: Point,
}
impl Bounds {
    pub fn width(&self) -> f32 {
        self.max.x - self.min.x
    }
    pub fn height(&self) -> f32 {
        self.max.y - self.min.y
    }
    // Corner accessors (y grows downward in this crate's coordinate naming:
    // min is the "left top", max the "right bottom").
    pub fn left_top(&self) -> Point {
        self.min
    }
    pub fn right_top(&self) -> Point {
        Point::new(self.max.x, self.min.y)
    }
    pub fn left_bottom(&self) -> Point {
        Point::new(self.min.x, self.max.y)
    }
    pub fn right_bottom(&self) -> Point {
        self.max
    }
}
/// A 2x3 affine transform stored as `[a, b, c, d, e, f]`, applied as
/// `x' = a*x + c*y + e`, `y' = b*x + d*y + f` (see `transform_point`).
#[derive(Debug, Copy, Clone, Default)]
pub struct Transform(pub [f32; 6]);
impl Transform {
    /// The identity transform.
    pub fn identity() -> Transform {
        Transform([1.0, 0.0, 0.0, 1.0, 0.0, 0.0])
    }
    /// Pure translation by `(tx, ty)`.
    pub fn translate(tx: f32, ty: f32) -> Transform {
        Transform([1.0, 0.0, 0.0, 1.0, tx, ty])
    }
    /// Axis-aligned scale.
    pub fn scale(sx: f32, sy: f32) -> Transform {
        Transform([sx, 0.0, 0.0, sy, 0.0, 0.0])
    }
    /// Rotation by `a` radians.
    pub fn rotate(a: f32) -> Transform {
        let cs = a.cos();
        let sn = a.sin();
        Transform([cs, sn, -sn, cs, 0.0, 0.0])
    }
    /// Horizontal skew by `a` radians.
    pub fn skew_x(a: f32) -> Transform {
        Transform([1.0, 0.0, a.tan(), 1.0, 0.0, 0.0])
    }
    /// Vertical skew by `a` radians.
    pub fn skew_y(a: f32) -> Transform {
        Transform([1.0, a.tan(), 0.0, 1.0, 0.0, 0.0])
    }
    /// Composes so that `rhs` is applied first, then `self` (`rhs * self`).
    pub fn pre_multiply(self, rhs: Self) -> Self {
        rhs * self
    }
    /// Returns the inverse transform, or the identity if `self` is
    /// (near-)singular (|det| < 1e-6).
    pub fn inverse(self) -> Transform {
        let t = &self.0;
        let det = t[0] * t[3] - t[2] * t[1];
        if det > -1e-6 && det < 1e-6 {
            return Transform::identity();
        }
        let invdet = 1.0 / det;
        let mut inv = [0f32; 6];
        inv[0] = t[3] * invdet;
        inv[2] = -t[2] * invdet;
        inv[4] = (t[2] * t[5] - t[3] * t[4]) * invdet;
        inv[1] = -t[1] * invdet;
        inv[3] = t[0] * invdet;
        inv[5] = (t[1] * t[4] - t[0] * t[5]) * invdet;
        Transform(inv)
    }
    /// Applies this transform to a point.
    pub fn transform_point(&self, pt: Point) -> Point {
        let t = &self.0;
        Point::new(
            pt.x * t[0] + pt.y * t[2] + t[4],
            pt.x * t[1] + pt.y * t[3] + t[5],
        )
    }
    /// Mean of the x- and y-axis scale factors (column lengths).
    pub(crate) fn average_scale(&self) -> f32 {
        let t = &self.0;
        let sx = (t[0] * t[0] + t[2] * t[2]).sqrt();
        let sy = (t[1] * t[1] + t[3] * t[3]).sqrt();
        (sx + sy) * 0.5
    }
    /// Average scale rounded *up* to the next 0.01 step — a quantized scale,
    /// presumably to limit how many font sizes get rasterized; verify.
    pub(crate) fn font_scale(&self) -> f32 {
        let a = self.average_scale();
        let d = 0.01f32;
        (a / d).ceil() * d
    }
}
impl Mul for Transform {
    type Output = Transform;
    /// Affine composition: `self * rhs` applies `self` first, then `rhs`
    /// (consistent with `transform_point`'s column layout).
    fn mul(mut self, rhs: Self) -> Self::Output {
        let t = &mut self.0;
        let s = &rhs.0;
        // Compute the first row into temporaries so the in-place updates of
        // t[1]/t[3]/t[5] below still read the original t[0]/t[2]/t[4].
        let t0 = t[0] * s[0] + t[1] * s[2];
        let t2 = t[2] * s[0] + t[3] * s[2];
        let t4 = t[4] * s[0] + t[5] * s[2] + s[4];
        t[1] = t[0] * s[1] + t[1] * s[3];
        t[3] = t[2] * s[1] + t[3] * s[3];
        t[5] = t[4] * s[1] + t[5] * s[3] + s[5];
        t[0] = t0;
        t[2] = t2;
        t[4] = t4;
        self
    }
}
impl MulAssign for Transform {
    fn mul_assign(&mut self, rhs: Self) {
        *self = *self * rhs;
    }
}
// 6-tuple convenience conversion: (a, b, c, d, e, f).
impl From<(f32, f32, f32, f32, f32, f32)> for Transform {
    fn from((a1, a2, a3, a4, a5, a6): (f32, f32, f32, f32, f32, f32)) -> Self {
        Transform([a1, a2, a3, a4, a5, a6])
    }
}
impl From<[f32; 6]> for Transform {
    /// Wraps a raw affine-matrix array in the `Transform` newtype.
    fn from(values: [f32; 6]) -> Self {
        // `values` is already the exact array the newtype stores; the
        // original element-by-element copy into a second array was redundant.
        Transform(values)
    }
}
| true |
6f98af9bdd220da1174e268e27f3830b783d5562
|
Rust
|
singlestore-labs/wasm-eval
|
/src/rust-ml/kmeans/src/main.rs
|
UTF-8
| 981 | 2.5625 | 3 |
[
"Apache-2.0",
"LLVM-exception"
] |
permissive
|
use std::str::FromStr;
use std::str::from_utf8;
use ndarray::Array2;
/// Demo entry point: clusters the bundled iris data (2 features, 2 clusters).
fn main() {
    fit(b"data/iris.data.csv", 2, 2);
}
/// Runs Lloyd's k-means over the CSV data and discards the result.
/// NOTE(review): despite the name, `csv_content` is a *path* encoded as
/// bytes, not file contents (see `read_data`) — consider renaming.
pub fn fit (csv_content: &[u8], dim: i32, num_clusters: usize) {
    let data = read_data(csv_content, dim as usize);
    let (_means, _clusters) = rkm::kmeans_lloyd(&data.view(), num_clusters as usize);
    // The commented-out code below serialized the means row-major; kept as a
    // reference for restoring a return value.
    // let mut serialized_vec = Vec::new();
    // for row in means.genrows() {
    //     serialized_vec.push(row[0]);
    //     serialized_vec.push(row[1]);
    // }
    // return serialized_vec;
}
/// Reads an f32 matrix from the CSV file at the path given (as UTF-8 bytes)
/// in `csv_content`, flattening all fields row by row into an
/// `(rows, dim)` ndarray.
///
/// # Panics
/// Panics on a non-UTF-8 path, unreadable file, non-numeric field, or a
/// total field count not divisible by `dim`.
fn read_data(csv_content: &[u8], dim: usize) -> Array2<f32> {
    let mut data_reader = csv::Reader::from_path(from_utf8(&csv_content).unwrap()).unwrap();
    let mut data: Vec<f32> = Vec::new();
    for record in data_reader.records() {
        for field in record.unwrap().iter() {
            let value = f32::from_str(field);
            data.push(value.unwrap());
        }
    }
    Array2::from_shape_vec((data.len() / dim, dim), data).unwrap()
}
| true |
ff5f05050527f5d70f53c8bb56bb7075434c4b32
|
Rust
|
solana-labs/solana
|
/core/src/sample_performance_service.rs
|
UTF-8
| 3,719 | 2.609375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use {
solana_ledger::{blockstore::Blockstore, blockstore_meta::PerfSampleV2},
solana_runtime::bank_forks::BankForks,
std::{
sync::{
atomic::{AtomicBool, Ordering},
Arc, RwLock,
},
thread::{self, sleep, Builder, JoinHandle},
time::{Duration, Instant},
},
};
/// Seconds between performance samples written to the blockstore.
const SAMPLE_INTERVAL: u64 = 60;
/// Milliseconds to sleep between exit-flag checks inside the sample loop.
const SLEEP_INTERVAL: u64 = 500;
/// Background service that periodically records cluster throughput samples.
pub struct SamplePerformanceService {
    // Handle of the sampling thread, joined on shutdown.
    thread_hdl: JoinHandle<()>,
}
impl SamplePerformanceService {
    /// Spawns the sampling thread; it runs until `exit` is set.
    pub fn new(
        bank_forks: &Arc<RwLock<BankForks>>,
        blockstore: Arc<Blockstore>,
        exit: Arc<AtomicBool>,
    ) -> Self {
        let bank_forks = bank_forks.clone();
        info!("Starting SamplePerformance service");
        let thread_hdl = Builder::new()
            .name("sample-performance".to_string())
            .spawn(move || {
                Self::run(bank_forks, blockstore, exit);
            })
            .unwrap();
        Self { thread_hdl }
    }
    /// Sampling loop: every `SAMPLE_INTERVAL` seconds, diff counters against
    /// the previous snapshot and persist a `PerfSampleV2` keyed by the
    /// highest known slot. Polls `exit` every `SLEEP_INTERVAL` ms.
    pub fn run(
        bank_forks: Arc<RwLock<BankForks>>,
        blockstore: Arc<Blockstore>,
        exit: Arc<AtomicBool>,
    ) {
        let mut snapshot = StatsSnapshot::from_forks(&bank_forks);
        let mut now = Instant::now();
        loop {
            if exit.load(Ordering::Relaxed) {
                break;
            }
            let elapsed = now.elapsed();
            if elapsed.as_secs() >= SAMPLE_INTERVAL {
                now = Instant::now();
                let new_snapshot = StatsSnapshot::from_forks(&bank_forks);
                let (num_transactions, num_non_vote_transactions, num_slots) =
                    new_snapshot.diff_since(&snapshot);
                // Store the new snapshot to compare against in the next iteration of the loop.
                snapshot = new_snapshot;
                let perf_sample = PerfSampleV2 {
                    // Note: since num_slots is computed from the highest slot and not the bank
                    // slot, this value should not be used in conjunction with num_transactions or
                    // num_non_vote_transactions to draw any conclusions about number of
                    // transactions per slot.
                    num_slots,
                    num_transactions,
                    num_non_vote_transactions,
                    sample_period_secs: elapsed.as_secs() as u16,
                };
                let highest_slot = snapshot.highest_slot;
                // A failed write is logged but does not stop the service.
                if let Err(e) = blockstore.write_perf_sample(highest_slot, &perf_sample) {
                    error!("write_perf_sample failed: slot {:?} {:?}", highest_slot, e);
                }
            }
            sleep(Duration::from_millis(SLEEP_INTERVAL));
        }
    }
    /// Blocks until the sampling thread has exited.
    pub fn join(self) -> thread::Result<()> {
        self.thread_hdl.join()
    }
}
/// Point-in-time counters captured from the root bank, diffed between
/// sampling intervals.
struct StatsSnapshot {
    pub num_transactions: u64,
    pub num_non_vote_transactions: u64,
    pub highest_slot: u64,
}
impl StatsSnapshot {
    /// Captures the current counters from the root bank of `forks`.
    fn from_forks(forks: &RwLock<BankForks>) -> Self {
        let forks = forks.read().unwrap();
        let bank = forks.root_bank();
        Self {
            num_transactions: bank.transaction_count(),
            num_non_vote_transactions: bank.non_vote_transaction_count_since_restart(),
            highest_slot: forks.highest_slot(),
        }
    }
    /// Returns (transactions, non-vote transactions, slots) advanced since
    /// `predecessor`; saturating so counter resets never underflow.
    fn diff_since(&self, predecessor: &Self) -> (u64, u64, u64) {
        (
            self.num_transactions
                .saturating_sub(predecessor.num_transactions),
            self.num_non_vote_transactions
                .saturating_sub(predecessor.num_non_vote_transactions),
            self.highest_slot.saturating_sub(predecessor.highest_slot),
        )
    }
}
| true |
a0fc5567ecc32163fab97f4dda16f6e33b3573c0
|
Rust
|
fire-lib/fire-http
|
/fire-http/src/routes/raw_route.rs
|
UTF-8
| 650 | 2.53125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::util::PinnedFuture;
use crate::{Response, Data};
pub use crate::server::HyperRequest;
/// The lower-level sibling of `Route`; it gets executed before `Route`.
///
/// Implement `RawRoute` only when you need something lower level — e.g.
/// websockets — with access to the underlying hyper request types.
pub trait RawRoute: Send + Sync {
    /// Returns whether this route wants to handle `req`.
    fn check(&self, req: &HyperRequest) -> bool;
    // check if every data you expect is in Data
    fn validate_data(&self, _data: &Data) {}
    /// Handles the request. NOTE(review): returning `None` presumably lets
    /// other routes handle the request — confirm in the server dispatch.
    fn call<'a>(
        &'a self,
        req: &'a mut HyperRequest,
        data: &'a Data
    ) -> PinnedFuture<'a, Option<crate::Result<Response>>>;
}
| true |
73887981ff024ef9ca1893ad1707382640fcd698
|
Rust
|
NivenT/RGB
|
/src/emulator/memory.rs
|
UTF-8
| 6,167 | 3.046875 | 3 |
[] |
no_license
|
use emulator::Mbc;
/// The GameBoy's full memory map, routing CPU reads/writes to the cartridge
/// MBC, banked WRAM/VRAM, palettes, and I/O registers.
pub struct Memory {
    pub cart: Mbc,
    pub bios: Vec<u8>, //Size depends on GB/GBC
    pub save_file: String,
    pub cgb_mode: bool, // GameBoy Color features enabled
    mem: Vec<u8>, //64 KB (flat address space for everything not banked)
    wram: Vec<u8>, //32 KB (8 4KB banks)
    vram: Vec<u8>, //16 KB (2 8KB banks)
    bgp: [u8; 64], //Background Palette Memory
    sp: [u8; 64], //Sprite Palette Memory
    wram_bank: u8, // currently mapped WRAM bank at 0xD000-0xDFFF
    key_state: u8, // joypad bits, 0xFF = nothing pressed
    running_bios: bool // true while the boot ROM is still mapped
}
impl Memory {
    /// Creates a memory map with zeroed RAM, no cartridge, WRAM bank 1
    /// selected, no keys pressed, and the BIOS mapped.
    pub fn new() -> Memory {
        Memory {
            mem: vec![0; 0x10000],
            wram: vec![0; 0x8000],
            vram: vec![0; 0x4000],
            bgp: [0; 64],
            sp: [0; 64],
            bios: Vec::new(),
            cart: Mbc::EMPTY,
            save_file: String::new(),
            wram_bank: 1,
            key_state: 0xFF,
            running_bios: true,
            cgb_mode: false
        }
    }
    /// Unmaps the boot ROM so reads in its range hit the cartridge instead.
    pub fn finished_with_bios(&mut self) {
        self.running_bios = false;
    }
    /// Reads one byte at `address`, dispatching through the memory map:
    /// BIOS/cart ROM, banked VRAM/WRAM, cart RAM, or I/O registers.
    pub fn rb(&self, address: u16) -> u8 {
        let address = address as usize;
        if 0x100 <= address && address < 0x200 {
            /* This area in the GBC BIOS is all 00s
             * I assume that means its supposed to be ignored in favor of
             ** the data in the cart here. I have not found a document
             ** stating that is the case, but tracing through the BIOS
             ** code suggests that this is the right thing to do.
             * Also, the BIOS runs correctly with this but not without it
             */
            self.cart.rb(address)
        } else if address < self.bios.len() {
            // Boot ROM shadows the cart only while it is still running.
            if self.running_bios {self.bios[address]} else {self.cart.rb(address)}
        } else if address < 0x8000 {
            self.cart.rb(address)
        } else if address < 0xA000 {
            // VRAM; bank select register 0xFF4F only exists in CGB mode.
            let bank = if self.cgb_mode {self.rb(0xFF4F)} else {0};
            self.vram[bank as usize*0x2000 + address%0x8000]
        } else if address < 0xC000 {
            self.cart.rb(address)
        } else if address < 0xD000 {
            // Fixed WRAM bank 0.
            self.wram[address - 0xC000]
        } else if address < 0xE000 {
            // Switchable WRAM bank.
            self.wram[self.wram_bank as usize*0x1000 + address%0xD000]
        } else if 0xFF00 == address {
            // Joypad: bits 4/5 select which key group is visible in bits 0-3.
            match self.mem[0xFF00] & 0x30 {
                0x10 => 0x10 | (self.key_state >> 4),
                0x20 => 0x20 | (self.key_state & 0xF),
                _ => 0
            }
        } else if 0xFF55 == address {
            // HDMA5: bit 7 reports transfer state.
            if self.mem[0xFF55] == 0xFF {0xFF} else {self.mem[0xFF55] & 0x7F}
        } else if 0xFF69 == address { //Background Palette Data
            self.bgp[(self.rb(0xFF68) & 0x3F) as usize]
        } else if 0xFF6B == address { //Sprite Palette Data
            self.sp[(self.rb(0xFF6A) & 0x3F) as usize]
        } else {
            self.mem[address]
        }
    }
//read word
pub fn rw(&self, address: u16) -> u16 {
self.rb(address) as u16 | ((self.rb(address+1) as u16) << 8)
}
//write byte
pub fn wb(&mut self, address: u16, val: u8) {
// TODO: Use match instead?
let address = address as usize;
if 0xFEA0 <= address && address < 0xFF00 {
return;
} else if address < 0x8000 {
return self.cart.wb(address, val);
} else if 0x8000 <= address && address < 0xA000 {
let bank = if self.cgb_mode {self.rb(0xFF4F)} else {0};
return self.vram[bank as usize*0x2000 + address%0x8000] = val;
} else if 0xA000 <= address && address < 0xC000 {
return self.cart.wb(address, val);
} else if 0xC000 <= address && address < 0xD000 {
self.wram[address - 0xC000] = val;
self.mem[address + 0x2000] = val;
} else if 0xD000 <= address && address < 0xE000 {
self.wram[self.wram_bank as usize*0x1000 + address%0xD000] = val;
if address < 0xDE00 {
self.mem[address + 0x2000] = val;
}
} else if 0xE000 <= address && address < 0xFE00 {
self.mem[address - 0x2000] = val;
} else if 0xFF04 == address { //divider register (DIV)
return self.mem[0xFF04] = 0;
} else if 0xFF44 == address { //scanline position
return self.mem[0xFF44] = 0;
} else if 0xFF46 == address { //OAM DMA transfer
let start = (val as u16) << 8;
// NOTE: I think this technically is supposed to happen over multiple cycles instead of all at once
for i in 0..0xA0 {
let copy_val = self.rb(start + i);
self.wb(0xFE00 + i, copy_val);
}
return;
} else if 0xFF4D == address { // Prepare speed switch
let curr_speed = self.mem[0xFF4D] & 0x80;
return self.mem[0xFF4D] = curr_speed | (val & 0x7F);
} else if 0xFF4F == address { //VRAM bank
return self.mem[0xFF4F] = val & 1;
} else if 0xFF55 == address && self.cgb_mode { //VRAM DMA transfer
if (val & (1 << 7)) == 0 {
//General Purpose DMA
let source = (self.rb(0xFF52) as u16 | ((self.rb(0xFF51) as u16) << 8)) & 0xFFF0;
let dest = ((self.rb(0xFF54) as u16 | ((self.rb(0xFF53) as u16) << 8)) & 0x1FF0) | 0x8000;
let length = 0x10*(val as u16 + 1);
for i in 0..length {
let copy_val = self.rb(source + i);
self.wb(dest + i, copy_val);
}
self.mem[0xFF55] = 0xFF;
return;
}
} else if 0xFF69 == address { //Background Palette Data
self.bgp[(self.rb(0xFF68) & 0x3F) as usize] = val;
if (self.rb(0xFF68) >> 7) > 0 {
let old_val = self.rb(0xFF68);
self.wb(0xFF68, (old_val + 1) | (1 << 7));
}
} else if 0xFF6B == address { //Sprite Palette Data
self.sp[(self.rb(0xFF6A) & 0x3F) as usize] = val;
if (self.rb(0xFF6A) >> 7) > 0 {
let old_val = self.rb(0xFF6A);
self.wb(0xFF6A, (old_val + 1) | (1 << 7));
}
} else if 0xFF70 == address { //select wram bank
self.wram_bank = if (val & 7) == 0 || !self.cgb_mode {1} else {val & 7};
}
// TODO: Reconsider if this should be in an else statement/if this function is actually correct
self.mem[address] = val;
}
//write word
pub fn ww(&mut self, address: u16, val: u16) {
self.wb(address, (val & 0x00FF) as u8);
self.wb(address+1, ((val & 0xFF00) >> 8) as u8)
}
//write line (sets the current scanline)
pub fn wl(&mut self, val: u8) {
self.mem[0xFF44] = val;
}
//write keys
pub fn wk(&mut self, key: u8, pressed: bool) {
if pressed {
self.key_state &= !(1 << key);
} else {
self.key_state |= 1 << key;
}
}
pub fn incr_div(&mut self) {
let div = self.mem[0xFF04];
self.mem[0xFF04] = div.wrapping_add(1);
}
pub fn read_vram(&self, address: u16, bank: bool) -> u8 {
self.vram[bank as usize*0x2000 + address as usize%0x8000]
}
pub fn read_bgp(&self, n: usize) -> u8 {
self.bgp[n]
}
pub fn read_sp(&self, n: usize) -> u8 {
self.sp[n]
}
pub fn switch_speed(&mut self) {
self.mem[0xFF4D] ^= 0x80;
}
}
| true |
b92c5127d38a6e3017a93ced6c349a41c7410977
|
Rust
|
joshuaclayton/read-ctags-rs
|
/src/language.rs
|
UTF-8
| 1,626 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use serde::Serialize;
use std::path::Path;
/// Programming/markup language recognized by its file extension.
#[derive(Debug, Copy, Clone, PartialEq, Serialize)]
pub enum Language {
    CSS,
    Elixir,
    Elm,
    HTML,
    JSON,
    JavaScript,
    Markdown,
    Ruby,
    Rust,
    SCSS,
    Sh,
    SVG,
    TypeScript,
    XML,
}
impl Language {
    /// Determines the language of a file from its path's extension.
    ///
    /// Extension-less paths (e.g. `bin/rails`) are treated as shell scripts;
    /// unrecognized extensions yield `None`.
    pub fn from_path(path: &str) -> Option<Language> {
        let extension = match Path::new(path).extension().and_then(|v| v.to_str()) {
            // No extension at all: assume an executable script.
            None => return Some(Language::Sh),
            Some(ext) => ext,
        };
        let language = match extension {
            "css" => Language::CSS,
            "ex" | "exs" => Language::Elixir,
            "elm" => Language::Elm,
            "html" => Language::HTML,
            "json" => Language::JSON,
            "js" | "jsx" => Language::JavaScript,
            "md" => Language::Markdown,
            "rb" => Language::Ruby,
            "rs" => Language::Rust,
            "scss" => Language::SCSS,
            "svg" => Language::SVG,
            "ts" | "tsx" => Language::TypeScript,
            "xml" => Language::XML,
            _ => return None,
        };
        Some(language)
    }
}
#[test]
// Covers a recognized extension, an absolute path, an extension-less script,
// and an unknown extension.
fn calculates_common_files() {
    assert_eq!(Language::from_path("../foo/bar.rb"), Some(Language::Ruby));
    assert_eq!(Language::from_path("/tmp/foo.md"), Some(Language::Markdown));
    assert_eq!(Language::from_path("bin/rails"), Some(Language::Sh));
    assert_eq!(Language::from_path("file.unknown"), None);
}
| true |
6970b2db89063afbd62b69a27bd5a54fb1d94581
|
Rust
|
wking/cincinnati
|
/vendor/async-compression/src/tokio/write/generic/decoder.rs
|
UTF-8
| 4,839 | 2.609375 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use core::{
pin::Pin,
task::{Context, Poll},
};
use std::io::{Error, ErrorKind, Result};
use crate::{
codec::Decode,
tokio::write::{AsyncBufWrite, BufWriter},
util::PartialBuffer,
};
use futures_core::ready;
use pin_project_lite::pin_project;
use tokio::io::AsyncWrite;
#[derive(Debug)]
enum State {
Decoding,
Finishing,
Done,
}
pin_project! {
#[derive(Debug)]
pub struct Decoder<W, D: Decode> {
#[pin]
writer: BufWriter<W>,
decoder: D,
state: State,
}
}
impl<W: AsyncWrite, D: Decode> Decoder<W, D> {
    /// Wraps `writer` so that bytes written to this `Decoder` are decoded
    /// with `decoder` before reaching `writer`.
    pub fn new(writer: W, decoder: D) -> Self {
        Self {
            writer: BufWriter::new(writer),
            decoder,
            state: State::Decoding,
        }
    }
    /// Borrows the underlying writer.
    pub fn get_ref(&self) -> &W {
        self.writer.get_ref()
    }
    /// Mutably borrows the underlying writer.
    pub fn get_mut(&mut self) -> &mut W {
        self.writer.get_mut()
    }
    /// Pinned mutable access to the underlying writer.
    pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut W> {
        self.project().writer.get_pin_mut()
    }
    /// Consumes the decoder, returning the underlying writer
    /// (any buffered data is discarded).
    pub fn into_inner(self) -> W {
        self.writer.into_inner()
    }
    /// Drives the decoder over `input`, writing decoded bytes into the
    /// buffered writer until the input is consumed or the stream ends.
    fn do_poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        input: &mut PartialBuffer<&[u8]>,
    ) -> Poll<Result<()>> {
        let mut this = self.project();
        loop {
            // Get writable space in the buffer, flushing downstream if full.
            let output = ready!(this.writer.as_mut().poll_partial_flush_buf(cx))?;
            let mut output = PartialBuffer::new(output);
            *this.state = match this.state {
                State::Decoding => {
                    // `decode` returns true once the compressed stream ends.
                    if this.decoder.decode(input, &mut output)? {
                        State::Finishing
                    } else {
                        State::Decoding
                    }
                }
                State::Finishing => {
                    // `finish` returns true once all trailing output is emitted.
                    if this.decoder.finish(&mut output)? {
                        State::Done
                    } else {
                        State::Finishing
                    }
                }
                State::Done => panic!("Write after end of stream"),
            };
            let produced = output.written().len();
            this.writer.as_mut().produce(produced);
            if let State::Done = this.state {
                return Poll::Ready(Ok(()));
            }
            if input.unwritten().is_empty() {
                return Poll::Ready(Ok(()));
            }
        }
    }
    /// Flushes pending decoder output into the buffered writer; the `done`
    /// flag reports when the codec has no more buffered data to emit.
    fn do_poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<()>> {
        let mut this = self.project();
        loop {
            let output = ready!(this.writer.as_mut().poll_partial_flush_buf(cx))?;
            let mut output = PartialBuffer::new(output);
            let (state, done) = match this.state {
                State::Decoding => {
                    let done = this.decoder.flush(&mut output)?;
                    (State::Decoding, done)
                }
                State::Finishing => {
                    if this.decoder.finish(&mut output)? {
                        (State::Done, false)
                    } else {
                        (State::Finishing, false)
                    }
                }
                State::Done => (State::Done, true),
            };
            *this.state = state;
            let produced = output.written().len();
            this.writer.as_mut().produce(produced);
            if done {
                return Poll::Ready(Ok(()));
            }
        }
    }
}
impl<W: AsyncWrite, D: Decode> AsyncWrite for Decoder<W, D> {
    fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll<Result<usize>> {
        if buf.is_empty() {
            return Poll::Ready(Ok(0));
        }
        let mut input = PartialBuffer::new(buf);
        match self.do_poll_write(cx, &mut input)? {
            // Only report Pending if no input was consumed at all; otherwise
            // report the partial progress to the caller.
            Poll::Pending if input.written().is_empty() => Poll::Pending,
            _ => Poll::Ready(Ok(input.written().len())),
        }
    }
    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<()>> {
        // Drain the decoder first, then flush the buffered inner writer.
        ready!(self.as_mut().do_poll_flush(cx))?;
        ready!(self.project().writer.as_mut().poll_flush(cx))?;
        Poll::Ready(Ok(()))
    }
    fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<()>> {
        // Transition into Finishing so the codec's trailing output is drained.
        if let State::Decoding = self.as_mut().project().state {
            *self.as_mut().project().state = State::Finishing;
        }
        ready!(self.as_mut().do_poll_flush(cx))?;
        if let State::Done = self.as_mut().project().state {
            ready!(self.as_mut().project().writer.as_mut().poll_shutdown(cx))?;
            Poll::Ready(Ok(()))
        } else {
            // Shutting down mid-stream means the compressed input was truncated.
            Poll::Ready(Err(Error::new(
                ErrorKind::Other,
                "Attempt to shutdown before finishing input",
            )))
        }
    }
}
| true |
b468c6d5e3f5ed20a5d1f5f483f509ae83e9d897
|
Rust
|
cryptape/rust-numext
|
/constructor/src/fixed_uint/core/builtin/std_convert.rs
|
UTF-8
| 2,189 | 2.671875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
// Copyright 2018-2019 Cryptape Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Implement built-in traits in [`::std::convert`].
//!
//! Not implement `AsRef` and `AsMut` traits to reduce confusion.
//!
//! [`::std::convert`]: https://doc.rust-lang.org/std/convert/index.html#traits
use crate::fixed_uint::UintConstructor;
use crate::utils;
use quote::quote;
impl UintConstructor {
    /// Generates all `std::convert` trait impls for the fixed-size uint type.
    pub fn impl_traits_std_convert(&self) {
        self.impl_traits_std_convert_from_bool();
        self.impl_traits_std_convert_from_primitive_uint();
    }
    /// Emits `From<bool>`: `true` maps to one, `false` to zero.
    fn impl_traits_std_convert_from_bool(&self) {
        let name = &self.ts.name;
        let part = quote!(
            impl ::std::convert::From<bool> for #name {
                #[inline]
                fn from(val: bool) -> Self {
                    if val {
                        Self::one()
                    } else {
                        Self::zero()
                    }
                }
            }
        );
        self.implt(part);
    }
    /// Emits `From<uN>` and `From<&uN>` for every primitive unsigned width,
    /// delegating to the generated `_from_uN` constructors.
    fn impl_traits_std_convert_from_primitive_uint(&self) {
        let name = &self.ts.name;
        for prim_bits_size in &[8u64, 16, 32, 64, 128] {
            let prim_type = utils::uint_suffix_to_ts(*prim_bits_size);
            let func_name = utils::ident_to_ts(format!("_from_u{}", prim_bits_size).as_ref());
            let part = quote!(
                impl ::std::convert::From<#prim_type> for #name {
                    #[inline]
                    fn from(prim: #prim_type) -> Self {
                        Self::#func_name(prim)
                    }
                }
                impl<'a> ::std::convert::From<&'a #prim_type> for #name {
                    #[inline]
                    fn from(prim: &#prim_type) -> Self {
                        Self::#func_name(*prim)
                    }
                }
            );
            self.implt(part);
        }
    }
}
| true |
0b5c167df6d1b0398183cbf82b6db12d4b0d82ab
|
Rust
|
tarrows/learn-rust
|
/sqlike/src/main.rs
|
UTF-8
| 4,211 | 3.296875 | 3 |
[] |
no_license
|
use std::io;
use std::io::Write;
use std::process;
#[macro_use] extern crate scan_fmt;
// REMAIN TODO:
// - Re-consider ownership
// - Re-consider input buffer
// - Extract to lib.rs
/// REPL entry point: reads a line, dispatches meta-commands (`.`-prefixed),
/// otherwise parses and "executes" a SQL-like statement. Runs forever until
/// `.exit`.
fn main() {
    // Not implemented input buffer: https://cstack.github.io/db_tutorial/parts/part1.html
    let mut input_buffer = String::new();
    loop {
        print_prompt();
        // https://stackoverflow.com/questions/34993744/why-does-this-read-input-before-printing
        io::stdout().flush().expect("flush failed!");
        io::stdin().read_line(&mut input_buffer).expect("failed to read line");
        // command.trim_end() : &str
        input_buffer = input_buffer.trim_end().to_string();
        if input_buffer.starts_with(".") {
            match do_meta_command(&input_buffer) {
                MetaCommandResult::Exit => {
                    // https://doc.rust-lang.org/std/process/fn.exit.html
                    process::exit(0);
                }
                MetaCommandResult::Success => {
                    input_buffer.clear();
                    continue;
                },
                MetaCommandResult::UnrecognizedCommand(msg) => {
                    println!("{}", msg);
                    input_buffer.clear();
                    continue;
                },
            };
        }
        match prepare_statement(&input_buffer) {
            PrepareResult::Success(statement) => {
                execute_statement(&statement);
            },
            PrepareResult::UnrecognizedStatement(msg) => {
                println!("{}", msg);
                input_buffer.clear();
                // skip the "Executed" message below on parse failure
                continue;
            },
            PrepareResult::SyntaxError(msg) => {
                println!("{}", msg);
                input_buffer.clear();
                continue;
            },
        };
        input_buffer.clear();
        println!("Executed");
    }
}
/// Outcome of handling a `.`-prefixed meta-command.
enum MetaCommandResult {
    Success,
    Exit,
    UnrecognizedCommand(String) // carries the error message to print
}
/// Outcome of parsing a statement; errors carry the message to print.
enum PrepareResult {
    Success(Statement),
    UnrecognizedStatement(String),
    SyntaxError(String)
}
/// Prints the REPL prompt without a trailing newline.
/// The caller is responsible for flushing stdout before reading input.
fn print_prompt() {
    let prompt = "db > ";
    print!("{}", prompt);
}
/// Dispatches a `.`-prefixed meta-command.
///
/// `.exit` terminates the REPL, `.test` is a recognized no-op, and anything
/// else yields `UnrecognizedCommand` with a printable message.
fn do_meta_command(input_buffer: &str) -> MetaCommandResult {
    match input_buffer {
        ".exit" => MetaCommandResult::Exit,
        ".test" => MetaCommandResult::Success,
        other => MetaCommandResult::UnrecognizedCommand(format!(
            "Unrecognized keyword at start of '{}'",
            other
        )),
    }
}
/// The operation a parsed statement performs.
enum StatementMethod {
    Insert(Row), // row parsed from the insert arguments
    Select,
}
/// A parsed statement ready for execution.
struct Statement {
    method: StatementMethod
}
// const uint32_t COLUMN_USERNAME_SIZE = 32;
// const uint32_t COLUMN_EMAIL_SIZE = 255;
// struct Row_t {
//   uint32_t id;
//   char username[COLUMN_USERNAME_SIZE];
//   char email[COLUMN_EMAIL_SIZE];
// };
// typedef struct Row_t Row;
/// A table row, mirroring the C tutorial's fixed-size `Row_t` above.
/// NOTE(review): the String fields are not length-limited here, unlike the
/// C version's 32/255-char arrays.
struct Row {
    id: usize,
    username: String, // varchar(32)
    email: String // varchar(255)
}
/// Parses an input line into a `Statement`.
///
/// `insert <id> <username> <email>` becomes `Insert` (SyntaxError if any of
/// the three fields fails to scan), `select...` becomes `Select`, anything
/// else is `UnrecognizedStatement`.
fn prepare_statement(input_buffer: &str) -> PrepareResult {
    if input_buffer.starts_with("insert") {
        // use scan_fmt macro https://docs.rs/scan_fmt/0.1.3/scan_fmt/
        let (id, username, email) = scan_fmt!(input_buffer, "insert {d} {} {}", usize, String, String);
        match (id, username, email) {
            (Some(id), Some(username), Some(email)) => {
                let row_to_insert = Row { id, username, email };
                let statement = Statement { method: StatementMethod::Insert(row_to_insert) };
                PrepareResult::Success(statement)
            },
            // any missing/invalid field means the scan failed
            _ => PrepareResult::SyntaxError(String::from("Invalid Arguments"))
        }
    } else if input_buffer.starts_with("select") {
        let statement = Statement { method: StatementMethod::Select };
        PrepareResult::Success(statement)
    } else {
        PrepareResult::UnrecognizedStatement(format!("Unrecognized command '{}'", input_buffer))
    }
}
/// Executes a prepared statement.
///
/// Both operations are currently stubs that only print a placeholder message.
fn execute_statement(statement: &Statement) {
    let message = match &statement.method {
        StatementMethod::Insert(_) => "This is where we would do an insert.",
        StatementMethod::Select => "This is where we would do a select.",
    };
    println!("{}", message);
}
| true |
ccc1ab3bdf179a628770798dd8e871652951b0f5
|
Rust
|
colelawrence/shipyard
|
/src/run/storage_borrow.rs
|
UTF-8
| 6,653 | 2.625 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::storage::{AllStorages, Entities, EntitiesMut};
use crate::views::{EntitiesView, EntitiesViewMut, UniqueView, UniqueViewMut, View, ViewMut};
#[cfg(feature = "non_send")]
use crate::NonSend;
#[cfg(all(feature = "non_send", feature = "non_sync"))]
use crate::NonSendSync;
#[cfg(feature = "non_sync")]
use crate::NonSync;
use crate::{error, Unique};
use core::convert::TryInto;
/// Borrows one or more storages from [`AllStorages`], producing the matching
/// view type. Implemented for entity storages, component references,
/// `Unique`/`NonSend`/`NonSync` wrappers, and tuples of any of these.
pub trait StorageBorrow<'a> {
    /// The view type produced by a successful borrow.
    type View;
    /// Attempts to borrow the storage(s); fails if already borrowed
    /// incompatibly or missing.
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage>;
}
// The unit type borrows nothing; useful as a neutral element.
impl<'a> StorageBorrow<'a> for () {
    type View = ();
    fn try_borrow(_: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        Ok(())
    }
}
impl<'a> StorageBorrow<'a> for Entities {
    type View = EntitiesView<'a>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        all_storages.try_into()
    }
}
impl<'a> StorageBorrow<'a> for EntitiesMut {
    type View = EntitiesViewMut<'a>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        all_storages.try_into()
    }
}
// Plain `&T` / `&mut T` borrow the component storage of `T` (Send + Sync).
impl<'a, T: 'static + Send + Sync> StorageBorrow<'a> for &T {
    type View = View<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        all_storages.try_into()
    }
}
impl<'a, T: 'static + Send + Sync> StorageBorrow<'a> for &mut T {
    type View = ViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        all_storages.try_into()
    }
}
// `Unique<_>` borrows a unique (singleton) storage instead of a component one.
impl<'a, T: 'static + Send + Sync> StorageBorrow<'a> for Unique<&T> {
    type View = UniqueView<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        all_storages.try_into()
    }
}
impl<'a, T: 'static + Send + Sync> StorageBorrow<'a> for Unique<&mut T> {
    type View = UniqueViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        all_storages.try_into()
    }
}
// --- `NonSend` wrappers: component types that are Sync but not Send. ---
#[cfg(feature = "non_send")]
impl<'a, T: 'static + Sync> StorageBorrow<'a> for NonSend<&T> {
    type View = View<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        View::try_storage_from_non_send(all_storages)
    }
}
#[cfg(feature = "non_send")]
impl<'a, T: 'static + Sync> StorageBorrow<'a> for NonSend<&mut T> {
    type View = ViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        ViewMut::try_storage_from_non_send(all_storages)
    }
}
#[cfg(feature = "non_send")]
impl<'a, T: 'static + Sync> StorageBorrow<'a> for Unique<NonSend<&T>> {
    type View = UniqueView<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        UniqueView::try_storage_from_non_send(all_storages)
    }
}
#[cfg(feature = "non_send")]
impl<'a, T: 'static + Sync> StorageBorrow<'a> for Unique<NonSend<&mut T>> {
    type View = UniqueViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        UniqueViewMut::try_storage_from_non_send(all_storages)
    }
}
// --- `NonSync` wrappers: component types that are Send but not Sync. ---
#[cfg(feature = "non_sync")]
impl<'a, T: 'static + Send> StorageBorrow<'a> for NonSync<&T> {
    type View = View<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        View::try_storage_from_non_sync(all_storages)
    }
}
#[cfg(feature = "non_sync")]
impl<'a, T: 'static + Send> StorageBorrow<'a> for NonSync<&mut T> {
    type View = ViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        ViewMut::try_storage_from_non_sync(all_storages)
    }
}
#[cfg(feature = "non_sync")]
impl<'a, T: 'static + Send> StorageBorrow<'a> for Unique<NonSync<&T>> {
    type View = UniqueView<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        UniqueView::try_storage_from_non_sync(all_storages)
    }
}
#[cfg(feature = "non_sync")]
impl<'a, T: 'static + Send> StorageBorrow<'a> for Unique<NonSync<&mut T>> {
    type View = UniqueViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        UniqueViewMut::try_storage_from_non_sync(all_storages)
    }
}
// --- `NonSendSync` wrappers: types that are neither Send nor Sync. ---
#[cfg(all(feature = "non_send", feature = "non_sync"))]
impl<'a, T: 'static> StorageBorrow<'a> for NonSendSync<&T> {
    type View = View<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        View::try_storage_from_non_send_sync(all_storages)
    }
}
#[cfg(all(feature = "non_send", feature = "non_sync"))]
impl<'a, T: 'static> StorageBorrow<'a> for NonSendSync<&mut T> {
    type View = ViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        ViewMut::try_storage_from_non_send_sync(all_storages)
    }
}
#[cfg(all(feature = "non_send", feature = "non_sync"))]
impl<'a, T: 'static> StorageBorrow<'a> for Unique<NonSendSync<&T>> {
    type View = UniqueView<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        UniqueView::try_storage_from_non_send_sync(all_storages)
    }
}
#[cfg(all(feature = "non_send", feature = "non_sync"))]
impl<'a, T: 'static> StorageBorrow<'a> for Unique<NonSendSync<&mut T>> {
    type View = UniqueViewMut<'a, T>;
    fn try_borrow(all_storages: &'a AllStorages) -> Result<Self::View, error::GetStorage> {
        UniqueViewMut::try_storage_from_non_send_sync(all_storages)
    }
}
// Implements `StorageBorrow` for a tuple of borrowable types; each element
// is borrowed in order and failures short-circuit.
macro_rules! impl_system_data {
    ($(($type: ident, $index: tt))+) => {
        impl<'a, $($type: StorageBorrow<'a>),+> StorageBorrow<'a> for ($($type,)+) {
            type View = ($($type::View,)+);
            fn try_borrow(
                storages: &'a AllStorages,
            ) -> Result<Self::View, error::GetStorage> {
                Ok(($(
                    <$type as StorageBorrow>::try_borrow(storages)?,
                )+))
            }
        }
    }
}
// Recursively expands `impl_system_data!` for tuples of increasing length
// (from 1-tuples up to 10-tuples below).
macro_rules! system_data {
    ($(($type: ident, $index: tt))*;($type1: ident, $index1: tt) $(($queue_type: ident, $queue_index: tt))*) => {
        impl_system_data![$(($type, $index))*];
        system_data![$(($type, $index))* ($type1, $index1); $(($queue_type, $queue_index))*];
    };
    ($(($type: ident, $index: tt))*;) => {
        impl_system_data![$(($type, $index))*];
    }
}
system_data![(A, 0); (B, 1) (C, 2) (D, 3) (E, 4) (F, 5) (G, 6) (H, 7) (I, 8) (J, 9)];
| true |
4765b4e37fc88b2b764ca3b2e109398595ab6304
|
Rust
|
olson-sean-k/plexus
|
/plexus/src/index.rs
|
UTF-8
| 21,771 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
//! Indexing and aggregation.
//!
//! This module provides types and traits that describe _index buffers_ and
//! _indexers_ that disambiguate vertex data to construct minimal _index_ and
//! _vertex buffers_. Plexus refers to independent vertex and index buffers as
//! _raw buffers_. See the [`buffer`] module and [`MeshBuffer`] type for tools
//! for working with these buffers.
//!
//! # Index Buffers
//!
//! Index buffers describe the topology of a polygonal mesh as ordered groups of
//! indices into a vertex buffer. Each group of indices represents a polygon.
//! The vertex buffer contains data that describes each vertex, such as
//! positions or surface normals. Plexus supports _structured_ and _flat_ index
//! buffers via the [`Grouping`] and [`IndexBuffer`] traits. These traits are
//! implemented for [`Vec`].
//!
//! Flat index buffers contain unstructured indices with an implicit grouping,
//! such as `Vec<usize>`. Arity of these buffers is constant and is described by
//! the [`Flat`] meta-grouping. Rendering pipelines typically expect this
//! format.
//!
//! Structured index buffers contain elements that explicitly group indices,
//! such as `Vec<Trigon<usize>>`. These buffers can be formed from polygonal
//! types in the [`primitive`] module.
//!
//! # Indexers
//!
//! [`Indexer`]s construct index and vertex buffers from iterators of polygonal
//! types in the [`primitive`] module, such as [`NGon`] and
//! [`UnboundedPolygon`]. The [`IndexVertices`] trait provides functions for
//! collecting an iterator of $n$-gons into these buffers.
//!
//! Mesh data structures also implement the [`FromIndexer`] and [`FromIterator`]
//! traits so that iterators of $n$-gons can be collected into these types
//! (using a [`HashIndexer`] by default). A specific [`Indexer`] can be
//! configured using the [`CollectWithIndexer`] trait.
//!
//! # Examples
//!
//! Indexing data for a cube to create raw buffers and a [`MeshBuffer`]:
//!
//! ```rust
//! # extern crate decorum;
//! # extern crate nalgebra;
//! # extern crate plexus;
//! #
//! use decorum::R64;
//! use nalgebra::Point3;
//! use plexus::buffer::MeshBuffer;
//! use plexus::index::{Flat3, HashIndexer};
//! use plexus::prelude::*;
//! use plexus::primitive::cube::Cube;
//! use plexus::primitive::generate::Position;
//!
//! type E3 = Point3<R64>;
//!
//! let (indices, positions) = Cube::new()
//! .polygons::<Position<E3>>()
//! .triangulate()
//! .index_vertices::<Flat3, _>(HashIndexer::default());
//! let buffer = MeshBuffer::<Flat3, E3>::from_raw_buffers(indices, positions).unwrap();
//! ```
//!
//! [`FromIterator`]: std::iter::FromIterator
//! [`Vec`]: std::vec::Vec
//! [`MeshBuffer`]: crate::buffer::MeshBuffer
//! [`buffer`]: crate::buffer
//! [`MeshGraph`]: crate::graph::MeshGraph
//! [`CollectWithIndexer`]: crate::index::CollectWithIndexer
//! [`Flat`]: crate::index::Flat
//! [`FromIndexer`]: crate::index::FromIndexer
//! [`HashIndexer`]: crate::index::HashIndexer
//! [`Indexer`]: crate::index::Indexer
//! [`IndexVertices`]: crate::index::IndexVertices
//! [`NGon`]: crate::primitive::NGon
//! [`UnboundedPolygon`]: crate::primitive::UnboundedPolygon
//! [`primitive`]: crate::primitive
use num::{Integer, NumCast, Unsigned};
use std::cmp;
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
use theon::adjunct::Map;
use typenum::NonZero;
use crate::constant::{Constant, ToType, TypeOf};
use crate::primitive::decompose::IntoVertices;
use crate::primitive::Topological;
use crate::{Monomorphic, StaticArity};
// Convenience aliases: the concrete buffer type for a grouping `R`, and the
// scalar index type stored inside that buffer.
pub(crate) type BufferOf<R> = Vec<<R as Grouping>::Group>;
pub(crate) type IndexOf<R> = <BufferOf<R> as IndexBuffer<R>>::Index;
// Note that it isn't possible for `IndexBuffer` types to implement
// `DynamicArity`, because they are typically parameterized by `R` (see
// implementations for `Vec<_>`). Instead, `DynamicArity` is implemented for
// `MeshBuffer`, which can bind a `Grouping` and its implementation of
// `StaticArity` with the underlying index buffer type.
/// Index buffer.
///
/// This trait is implemented by types that can be used as an index buffer. The
/// elements in the buffer are determined by a `Grouping`.
///
/// In particular, this trait is implemented by `Vec`, such as `Vec<usize>` or
/// `Vec<Trigon<usize>>`.
pub trait IndexBuffer<R>
where
    R: Grouping,
{
    /// The type of individual indices in the buffer.
    ///
    /// This type is distinct from the grouping. For example, if an index buffer
    /// contains [`Trigon<usize>`][`Trigon`] elements, then this type is `usize`.
    ///
    /// [`Trigon`]: crate::primitive::Trigon
    type Index: Copy + Integer + Unsigned;
}
// Flat buffers store bare indices, so the index type is the element type.
impl<T, const N: usize> IndexBuffer<Flat<T, N>> for Vec<T>
where
    Constant<N>: ToType,
    TypeOf<N>: NonZero,
    T: Copy + Integer + Unsigned,
{
    type Index = T;
}
// Structured buffers store polygons, so the index type is the vertex type.
impl<P> IndexBuffer<P> for Vec<P>
where
    P: Topological,
    P::Vertex: Copy + Integer + Unsigned,
{
    type Index = P::Vertex;
}
/// An index buffer that can append the indices of a polygon `P`.
pub trait Push<R, P>: IndexBuffer<R>
where
    R: Grouping,
    P: Topological<Vertex = Self::Index>,
    P::Vertex: Copy + Integer + Unsigned,
{
    /// Appends the vertex data of `index` to the buffer.
    fn push(&mut self, index: P);
}
// Pushing a polygon into a flat buffer flattens it into its constituent
// vertex indices; the grouping stays implicit in the buffer's arity.
impl<T, P, const N: usize> Push<Flat<T, N>, P> for Vec<T>
where
    Constant<N>: ToType,
    TypeOf<N>: NonZero,
    T: Copy + Integer + Unsigned,
    P: Monomorphic + IntoVertices + Topological<Vertex = T>,
{
    fn push(&mut self, index: P) {
        // Append every vertex index of the polygon in order.
        self.extend(index.into_vertices());
    }
}
// Pushing into a structured buffer converts the polygon into the buffer's
// group type and appends it as a single element (inherent `Vec::push`).
impl<P, Q> Push<P, Q> for Vec<P>
where
    P: From<Q> + Grouping + Topological,
    P::Vertex: Copy + Integer + Unsigned,
    Q: Topological<Vertex = P::Vertex>,
    Self: IndexBuffer<P, Index = P::Vertex>,
{
    fn push(&mut self, index: Q) {
        self.push(P::from(index));
    }
}
/// Describes how indices are grouped in an index buffer (see `Flat` and the
/// blanket impl for `Topological` types).
pub trait Grouping: StaticArity {
    /// The element type stored in the index buffer for this grouping.
    type Group;
}
/// Flat index buffer meta-grouping.
///
/// Describes a flat index buffer with a constant arity. The number of vertices
/// in the indexed topological structures is specified using a constant
/// parameter `N`, which represents the number of grouped elements in the index
/// buffer. For example, `Flat<_, 3>` describes an index buffer with indices in
/// implicit and contiguous groups of three. Note that this constant may be
/// distinct from the arity of the indexed topological structures (i.e., if `N`
/// is less than three, then arity is `N - 1` and may be zero.).
///
/// Unlike structured groupings, this meta-grouping is needed to associate an
/// index type with an implicit grouping and arity. For example, `Vec<usize>`
/// implements both `IndexBuffer<Flat<usize, 3>>` (a triangular buffer) and
/// `IndexBuffer<Flat<usize, 4>>` (a quadrilateral buffer).
///
/// See the [`index`] module documentation for more information about index
/// buffers.
///
/// # Examples
///
/// Creating a [`MeshBuffer`] with a flat and triangular index buffer:
///
/// ```rust
/// use plexus::buffer::MeshBuffer;
/// use plexus::index::Flat;
/// use plexus::prelude::*;
///
/// let mut buffer = MeshBuffer::<Flat<usize, 3>, (f64, f64, f64)>::default();
/// ```
///
/// [`MeshBuffer`]: crate::buffer::MeshBuffer
/// [`index`]: crate::index
#[derive(Debug)]
pub struct Flat<T, const N: usize>
where
    Constant<N>: ToType,
    TypeOf<N>: NonZero,
    T: Copy + Integer + Unsigned,
{
    // Marker only: `Flat` is never instantiated, it exists at the type level.
    phantom: PhantomData<fn() -> T>,
}
impl<T, const N: usize> Grouping for Flat<T, N>
where
    Constant<N>: ToType,
    TypeOf<N>: NonZero,
    T: Copy + Integer + Unsigned,
{
    /// The elements of flat index buffers are indices. These indices are
    /// implicitly grouped by the arity of the buffer (`N`).
    type Group = T;
}
// Flat buffers have exactly one arity, so the grouping is monomorphic.
impl<T, const N: usize> Monomorphic for Flat<T, N>
where
    Constant<N>: ToType,
    TypeOf<N>: NonZero,
    T: Copy + Integer + Unsigned,
{
}
impl<T, const N: usize> StaticArity for Flat<T, N>
where
    Constant<N>: ToType,
    TypeOf<N>: NonZero,
    T: Copy + Integer + Unsigned,
{
    type Static = usize;
    const ARITY: Self::Static = crate::n_arity(N);
}
/// Alias for a flat and triangular index buffer.
pub type Flat3<T = usize> = Flat<T, 3>;
/// Alias for a flat and quadrilateral index buffer.
pub type Flat4<T = usize> = Flat<T, 4>;
/// Structured index buffer grouping.
///
/// Describes a structured index buffer containing [`Topological`] types with
/// index data in their vertices.
///
/// # Examples
///
/// Creating a [`MeshBuffer`] with a structured index buffer:
///
/// ```rust
/// use plexus::buffer::MeshBuffer;
/// use plexus::prelude::*;
/// use plexus::primitive::BoundedPolygon;
///
/// let mut buffer = MeshBuffer::<BoundedPolygon<usize>, (f64, f64, f64)>::default();
/// ```
///
/// [`MeshBuffer`]: crate::buffer::MeshBuffer
/// [`Topological`]: crate::primitive::Topological
impl<P> Grouping for P
where
    P: Topological,
    P::Vertex: Copy + Integer + Unsigned,
{
    /// [`Topological`] index buffers contain $n$-gons that explicitly group
    /// their indices.
    ///
    /// [`Topological`]: crate::primitive::Topological
    type Group = P;
}
/// Vertex indexer.
///
/// Disambiguates arbitrary vertex data and emits a one-to-one mapping of
/// indices to vertices.
pub trait Indexer<T, K>
where
    T: Topological,
{
    /// Indexes a vertex using a keying function.
    ///
    /// Returns a tuple containing the index and optionally vertex data. Vertex
    /// data is only returned if the data has not yet been indexed, otherwise
    /// `None` is returned.
    ///
    /// `f` extracts the key used to decide whether two vertices are the same.
    fn index<F>(&mut self, vertex: T::Vertex, f: F) -> (usize, Option<T::Vertex>)
    where
        F: Fn(&T::Vertex) -> &K;
}
/// Hashing vertex indexer.
///
/// This indexer hashes key data for vertices to form an index. This is fast,
/// reliable, and requires no configuration. Prefer this indexer when possible.
///
/// The vertex key data must implement [`Hash`]. Vertex data often includes
/// floating-point values (i.e., `f32` or `f64`), which do not implement
/// [`Hash`]. Types from the [`decorum`] crate can be used to allow
/// floating-point data to be hashed.
///
/// # Examples
///
/// ```rust
/// # extern crate decorum;
/// # extern crate nalgebra;
/// # extern crate plexus;
/// #
/// use decorum::R64;
/// use nalgebra::Point3;
/// use plexus::index::{Flat3, HashIndexer};
/// use plexus::prelude::*;
/// use plexus::primitive::cube::Cube;
/// use plexus::primitive::generate::Position;
///
/// let (indices, positions) = Cube::new()
///     .polygons::<Position<Point3<R64>>>()
///     .triangulate()
///     .index_vertices::<Flat3, _>(HashIndexer::default());
/// ```
///
/// [`decorum`]: https://crates.io/crates/decorum
///
/// [`Hash`]: std::hash::Hash
pub struct HashIndexer<T, K>
where
    T: Topological,
    K: Clone + Eq + Hash,
{
    hash: HashMap<K, usize>, // maps a vertex key to its assigned index
    n: usize, // next index to assign
    phantom: PhantomData<fn() -> T>,
}
impl<T, K> HashIndexer<T, K>
where
    T: Topological,
    K: Clone + Eq + Hash,
{
    /// Creates an empty `HashIndexer` that will assign indices starting at zero.
    pub fn new() -> Self {
        Self {
            hash: HashMap::default(),
            n: 0,
            phantom: PhantomData,
        }
    }
}
impl<T, K> Default for HashIndexer<T, K>
where
    T: Topological,
    K: Clone + Eq + Hash,
{
    /// Equivalent to [`HashIndexer::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T, K> Indexer<T, K> for HashIndexer<T, K>
where
    T: Topological,
    K: Clone + Eq + Hash,
{
    /// Indexes `input` by hashing the key projected by `f`.
    ///
    /// Returns the index assigned to the key and, if this is the first
    /// occurrence of the key, the vertex data itself (otherwise `None`).
    fn index<F>(&mut self, input: T::Vertex, f: F) -> (usize, Option<T::Vertex>)
    where
        F: Fn(&T::Vertex) -> &K,
    {
        let key = f(&input);
        if let Some(&index) = self.hash.get(key) {
            // Already indexed: emit the existing index and no vertex data.
            return (index, None);
        }
        // First occurrence: assign the next index and emit the vertex data.
        // The key is cloned only on this path, unlike the entry API, which
        // would require cloning the key on every query.
        let index = self.n;
        self.n += 1;
        self.hash.insert(key.clone(), index);
        (index, Some(input))
    }
}
/// LRU caching vertex indexer.
///
/// This indexer uses a _least recently used_ (LRU) cache to form an index. To
/// function correctly, an adequate cache capacity is necessary. If the capacity
/// is insufficient, then redundant vertex data may be emitted. See
/// [`LruIndexer::with_capacity`].
///
/// This indexer is useful if the vertex key data does not implement [`Hash`].
/// If the key data can be hashed, prefer `HashIndexer` instead.
///
/// # Examples
///
/// ```rust
/// # extern crate nalgebra;
/// # extern crate plexus;
/// #
/// use nalgebra::Point3;
/// use plexus::index::{Flat3, LruIndexer};
/// use plexus::prelude::*;
/// use plexus::primitive::generate::Position;
/// use plexus::primitive::sphere::UvSphere;
///
/// let (indices, positions) = UvSphere::new(8, 8)
///     .polygons::<Position<Point3<f64>>>()
///     .triangulate()
///     .index_vertices::<Flat3, _>(LruIndexer::with_capacity(64));
/// ```
///
/// [`Hash`]: std::hash::Hash
/// [`LruIndexer::with_capacity`]: crate::index::LruIndexer::with_capacity
pub struct LruIndexer<T, K>
where
    T: Topological,
    K: Clone + PartialEq,
{
    // Cache of `(key, index)` pairs ordered from least to most recently used.
    lru: Vec<(K, usize)>,
    // Maximum number of entries retained in the cache (at least one).
    capacity: usize,
    // The next index to assign to a key not present in the cache.
    n: usize,
    // Marker tying the indexer to `T` without storing a `T`.
    phantom: PhantomData<fn() -> T>,
}
impl<T, K> LruIndexer<T, K>
where
    T: Topological,
    K: Clone + PartialEq,
{
    /// Creates a new `LruIndexer` with a default capacity of 16 entries.
    pub fn new() -> Self {
        Self::with_capacity(16)
    }

    /// Creates a new `LruIndexer` with the specified capacity.
    ///
    /// The capacity is clamped to at least one entry. It must be sufficient
    /// in order to generate a unique set of index and vertex data.
    pub fn with_capacity(capacity: usize) -> Self {
        let capacity = capacity.max(1);
        LruIndexer {
            lru: Vec::with_capacity(capacity),
            capacity,
            n: 0,
            phantom: PhantomData,
        }
    }

    // Scans the cache for `key`, returning its position within the cache and
    // the vertex index stored alongside it.
    fn find(&self, key: &K) -> Option<(usize, usize)> {
        let position = self.lru.iter().position(|entry| entry.0 == *key)?;
        Some((position, self.lru[position].1))
    }
}
impl<T, K> Default for LruIndexer<T, K>
where
    T: Topological,
    K: Clone + PartialEq,
{
    /// Creates an `LruIndexer` with the default capacity, equivalent to
    /// [`LruIndexer::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T, K> Indexer<T, K> for LruIndexer<T, K>
where
    T: Topological,
    K: Clone + PartialEq,
{
    /// Indexes `input` by linearly searching the LRU cache for the key
    /// projected by `f`.
    ///
    /// Returns the index assigned to the key and, if the key was not found in
    /// the cache, the vertex data itself (otherwise `None`).
    fn index<F>(&mut self, input: T::Vertex, f: F) -> (usize, Option<T::Vertex>)
    where
        F: Fn(&T::Vertex) -> &K,
    {
        let key = f(&input).clone();
        match self.find(&key) {
            Some((position, index)) => {
                // Cache hit: move the entry to the back of the cache so that
                // it becomes the most recently used.
                let entry = self.lru.remove(position);
                self.lru.push(entry);
                (index, None)
            }
            None => {
                // Cache miss: assign the next index and emit the vertex data,
                // evicting the least recently used entry when at capacity.
                let index = self.n;
                self.n += 1;
                if self.lru.len() >= self.capacity {
                    self.lru.remove(0);
                }
                self.lru.push((key, index));
                (index, Some(input))
            }
        }
    }
}
/// Functions for collecting an iterator of $n$-gons into raw index and vertex
/// buffers.
///
/// Unlike [`IndexVertices`], this trait provides functions that are closed (not
/// parameterized) with respect to [`Grouping`]. Instead, the trait is
/// implemented for a particular [`Grouping`]. These functions cannot be used
/// fluently as part of an iterator expression.
///
/// [`Grouping`]: crate::index::Grouping
/// [`IndexVertices`]: crate::index::IndexVertices
pub trait GroupedIndexVertices<R, P>: Sized
where
    R: Grouping,
    P: Topological,
{
    /// Indexes the $n$-gons into raw index and vertex buffers using the given
    /// indexer and keying function `f`, which projects the portion of each
    /// vertex used for disambiguation.
    fn index_vertices_with<N, K, F>(self, indexer: N, f: F) -> (Vec<R::Group>, Vec<P::Vertex>)
    where
        N: Indexer<P, K>,
        F: Fn(&P::Vertex) -> &K;
    /// Indexes the $n$-gons into raw index and vertex buffers using the given
    /// indexer, keying on the entire vertex.
    fn index_vertices<N>(self, indexer: N) -> (Vec<R::Group>, Vec<P::Vertex>)
    where
        N: Indexer<P, P::Vertex>,
    {
        self.index_vertices_with::<N, P::Vertex, _>(indexer, |vertex| vertex)
    }
}
impl<R, P, I> GroupedIndexVertices<R, P> for I
where
    I: Iterator<Item = P>,
    R: Grouping,
    P: Map<IndexOf<R>> + Topological,
    P::Output: Topological<Vertex = IndexOf<R>>,
    BufferOf<R>: Push<R, P::Output>,
    IndexOf<R>: NumCast,
{
    /// Drains the iterator, indexing each polygon's vertices and pushing the
    /// resulting index topology into the index buffer.
    fn index_vertices_with<N, K, F>(self, mut indexer: N, f: F) -> (Vec<R::Group>, Vec<P::Vertex>)
    where
        N: Indexer<P, K>,
        F: Fn(&P::Vertex) -> &K,
    {
        let mut indices = Vec::new();
        let mut vertices = Vec::new();
        for polygon in self {
            // Map each vertex to its index, collecting previously unseen
            // vertex data along the way.
            let indexed = polygon.map(|vertex| {
                let (index, emitted) = indexer.index(vertex, &f);
                if let Some(emitted) = emitted {
                    vertices.push(emitted);
                }
                NumCast::from(index).unwrap()
            });
            Push::push(&mut indices, indexed);
        }
        (indices, vertices)
    }
}
/// Functions for collecting an iterator of $n$-gons into raw index and vertex
/// buffers.
///
/// Unlike [`GroupedIndexVertices`], this trait provides functions that are
/// parameterized with respect to [`Grouping`].
///
/// See [`HashIndexer`] and [`LruIndexer`].
///
/// # Examples
///
/// ```rust
/// # extern crate decorum;
/// # extern crate nalgebra;
/// # extern crate plexus;
/// #
/// use decorum::R64;
/// use nalgebra::Point3;
/// use plexus::index::{Flat3, HashIndexer};
/// use plexus::prelude::*;
/// use plexus::primitive::generate::Position;
/// use plexus::primitive::sphere::UvSphere;
///
/// let sphere = UvSphere::new(32, 32);
/// let (indices, positions) = sphere
///     .polygons::<Position<Point3<R64>>>()
///     .triangulate()
///     .index_vertices::<Flat3, _>(HashIndexer::default());
/// ```
///
/// [`GroupedIndexVertices`]: crate::index::GroupedIndexVertices
/// [`Grouping`]: crate::index::Grouping
/// [`HashIndexer`]: crate::index::HashIndexer
/// [`LruIndexer`]: crate::index::LruIndexer
pub trait IndexVertices<P>
where
    P: Topological,
{
    /// Indexes an iterator of $n$-gons into raw index and vertex buffers using
    /// the given grouping, indexer, and keying function.
    ///
    /// The keying function `f` projects the portion of each vertex used for
    /// disambiguation by the indexer.
    fn index_vertices_with<R, N, K, F>(self, indexer: N, f: F) -> (Vec<R::Group>, Vec<P::Vertex>)
    where
        Self: GroupedIndexVertices<R, P>,
        R: Grouping,
        N: Indexer<P, K>,
        F: Fn(&P::Vertex) -> &K,
    {
        GroupedIndexVertices::<R, P>::index_vertices_with(self, indexer, f)
    }
    /// Indexes an iterator of $n$-gons into raw index and vertex buffers using
    /// the given grouping and indexer, keying on the entire vertex.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # extern crate decorum;
    /// # extern crate nalgebra;
    /// # extern crate plexus;
    /// #
    /// use decorum::R64;
    /// use nalgebra::Point3;
    /// use plexus::index::HashIndexer;
    /// use plexus::prelude::*;
    /// use plexus::primitive::cube::Cube;
    /// use plexus::primitive::generate::Position;
    /// use plexus::primitive::Trigon;
    ///
    /// // `indices` contains `Trigon`s with index data.
    /// let (indices, positions) = Cube::new()
    ///     .polygons::<Position<Point3<R64>>>()
    ///     .subdivide()
    ///     .triangulate()
    ///     .index_vertices::<Trigon<usize>, _>(HashIndexer::default());
    /// ```
    fn index_vertices<R, N>(self, indexer: N) -> (Vec<R::Group>, Vec<P::Vertex>)
    where
        Self: GroupedIndexVertices<R, P>,
        R: Grouping,
        N: Indexer<P, P::Vertex>,
    {
        IndexVertices::<P>::index_vertices_with(self, indexer, |vertex| vertex)
    }
}
// Blanket implementation: any iterator of topological items gains the
// `IndexVertices` functions, whose default bodies delegate to
// `GroupedIndexVertices`.
impl<P, I> IndexVertices<P> for I
where
    I: Iterator<Item = P>,
    P: Topological,
{
}
/// Conversion from indexed $n$-gon data produced by an [`Indexer`].
///
/// Types implementing this trait can be constructed from an iterator of
/// $n$-gons `P` by indexing their vertices with an [`Indexer`] over the
/// (possibly different) topology `Q` sharing the same vertex type.
///
/// [`Indexer`]: crate::index::Indexer
pub trait FromIndexer<P, Q>: Sized
where
    P: Topological,
    Q: Topological<Vertex = P::Vertex>,
{
    /// The error produced when construction from indexed data fails.
    type Error: Debug;
    /// Constructs `Self` from the given $n$-gons using the given indexer.
    fn from_indexer<I, N>(input: I, indexer: N) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = P>,
        N: Indexer<Q, P::Vertex>;
}
/// Functions for collecting an iterator of $n$-gons into a mesh data structure.
///
/// These functions can be used to collect data from an iterator into mesh data
/// structures like [`MeshBuffer`] or [`MeshGraph`].
///
/// See [`HashIndexer`] and [`LruIndexer`].
///
/// [`MeshBuffer`]: crate::buffer::MeshBuffer
/// [`MeshGraph`]: crate::graph::MeshGraph
/// [`HashIndexer`]: crate::index::HashIndexer
/// [`LruIndexer`]: crate::index::LruIndexer
pub trait CollectWithIndexer<P, Q>
where
    P: Topological,
    Q: Topological<Vertex = P::Vertex>,
{
    /// Collects an iterator of $n$-gons into a mesh data structure using the
    /// given indexer.
    ///
    /// Unlike `collect`, this function allows the indexer to be specified.
    ///
    /// # Errors
    ///
    /// Returns an error defined by the implementer if the target type cannot be
    /// constructed from the indexed vertex data.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # extern crate decorum;
    /// # extern crate nalgebra;
    /// # extern crate plexus;
    /// #
    /// use decorum::R64;
    /// use nalgebra::Point3;
    /// use plexus::graph::MeshGraph;
    /// use plexus::prelude::*;
    /// use plexus::primitive::cube::Cube;
    /// use plexus::primitive::generate::Position;
    /// use plexus::index::HashIndexer;
    ///
    /// let graph: MeshGraph<Point3<f64>> = Cube::new()
    ///     .polygons::<Position<Point3<R64>>>()
    ///     .collect_with_indexer(HashIndexer::default())
    ///     .unwrap();
    /// ```
    fn collect_with_indexer<T, N>(self, indexer: N) -> Result<T, T::Error>
    where
        T: FromIndexer<P, Q>,
        N: Indexer<Q, P::Vertex>;
}
impl<P, Q, I> CollectWithIndexer<P, Q> for I
where
I: Iterator<Item = P>,
P: Topological,
Q: Topological<Vertex = P::Vertex>,
{
fn collect_with_indexer<T, N>(self, indexer: N) -> Result<T, T::Error>
where
T: FromIndexer<P, Q>,
N: Indexer<Q, P::Vertex>,
{
T::from_indexer(self, indexer)
}
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.